commit stringlengths 40-40 | old_file stringlengths 4-234 | new_file stringlengths 4-234 | old_contents stringlengths 10-3.01k | new_contents stringlengths 19-3.38k | subject stringlengths 16-736 | message stringlengths 17-2.63k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5-82.6k | config stringclasses 4 values | content stringlengths 134-4.41k | fuzzy_diff stringlengths 29-3.44k |
---|---|---|---|---|---|---|---|---|---|---|---|---|
91951e85caf1b928224dba1ecc33a59957187dff
|
tkp/tests/__init__.py
|
tkp/tests/__init__.py
|
import unittest
testfiles = [
'tkp.tests.accessors',
'tkp.tests.classification',
'tkp.tests.config',
'tkp.tests.coordinates',
'tkp.tests.database',
'tkp.tests.dataset',
'tkp.tests.FDR',
'tkp.tests.feature_extraction',
'tkp.tests.gaussian',
'tkp.tests.L15_12h_const',
'tkp.tests.sigmaclip',
'tkp.tests.source_measurements',
'tkp.tests.wcs',
]
# Pyrap is required for AIPS++ image support, but
# not necessary for the rest of the library.
try:
import pyrap
except:
pass
else:
testfiles.append('tkp.tests.aipsppimage')
|
import unittest
testfiles = [
'tkp.tests.accessors',
'tkp.tests.classification',
'tkp.tests.config',
'tkp.tests.coordinates',
'tkp.tests.database',
'tkp.tests.dataset',
'tkp.tests.FDR',
'tkp.tests.feature_extraction',
'tkp.tests.gaussian',
'tkp.tests.L15_12h_const',
'tkp.tests.sigmaclip',
'tkp.tests.source_measurements',
'tkp.tests.wcs',
'tkp.tests.aipsppimage'
]
|
Remove special-casing of aipsppimage test
|
Remove special-casing of aipsppimage test
We have other dependencies on pyrap too...
git-svn-id: 71bcaaf8fac6301ed959c5094abb905057e55e2d@2123 2b73c8c1-3922-0410-90dd-bc0a5c6f2ac6
|
Python
|
bsd-2-clause
|
bartscheers/tkp,mkuiack/tkp,transientskp/tkp,transientskp/tkp,mkuiack/tkp,bartscheers/tkp
|
python
|
## Code Before:
import unittest
testfiles = [
'tkp.tests.accessors',
'tkp.tests.classification',
'tkp.tests.config',
'tkp.tests.coordinates',
'tkp.tests.database',
'tkp.tests.dataset',
'tkp.tests.FDR',
'tkp.tests.feature_extraction',
'tkp.tests.gaussian',
'tkp.tests.L15_12h_const',
'tkp.tests.sigmaclip',
'tkp.tests.source_measurements',
'tkp.tests.wcs',
]
# Pyrap is required for AIPS++ image support, but
# not necessary for the rest of the library.
try:
import pyrap
except:
pass
else:
testfiles.append('tkp.tests.aipsppimage')
## Instruction:
Remove special-casing of aipsppimage test
We have other dependencies on pyrap too...
git-svn-id: 71bcaaf8fac6301ed959c5094abb905057e55e2d@2123 2b73c8c1-3922-0410-90dd-bc0a5c6f2ac6
## Code After:
import unittest
testfiles = [
'tkp.tests.accessors',
'tkp.tests.classification',
'tkp.tests.config',
'tkp.tests.coordinates',
'tkp.tests.database',
'tkp.tests.dataset',
'tkp.tests.FDR',
'tkp.tests.feature_extraction',
'tkp.tests.gaussian',
'tkp.tests.L15_12h_const',
'tkp.tests.sigmaclip',
'tkp.tests.source_measurements',
'tkp.tests.wcs',
'tkp.tests.aipsppimage'
]
|
...
'tkp.tests.sigmaclip',
'tkp.tests.source_measurements',
'tkp.tests.wcs',
'tkp.tests.aipsppimage'
]
...
|
c8e0b0e110c91120b63a62c5ae941a88c7010780
|
setup.py
|
setup.py
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "[email protected]",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "[email protected]",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
Bump to 0.0.2a for attr. graph feature
|
Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.
|
Python
|
mit
|
ethanrowe/python-merky
|
python
|
## Code Before:
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.1a",
author = "Ethan Rowe",
author_email = "[email protected]",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.test",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
## Instruction:
Bump to 0.0.2a for attr. graph feature
Add new subpackages to the setup package list.
## Code After:
from setuptools import setup
import os
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "[email protected]",
description = ("JSON-oriented merkle tree utilities"),
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
""",
test_suite = "nose.collector",
install_requires = [
'six >= 1.5',
],
setup_requires = [
'nose',
'mock >= 1.0.1',
],
tests_require = [
'nose',
'mock >= 1.0.1',
],
)
|
...
setup(
name = "merky",
version = "0.0.2a",
author = "Ethan Rowe",
author_email = "[email protected]",
description = ("JSON-oriented merkle tree utilities"),
...
license = "MIT",
url = "https://github.com/ethanrowe/python-merky",
packages = ["merky",
"merky.cases",
"merky.test",
"merky.test.usecases",
],
long_description = """
Merky - compute merkle trees for JSON-friendly data.
...
|
9639eb34f53444387621ed0a27ef9b273b38df79
|
slackclient/_slackrequest.py
|
slackclient/_slackrequest.py
|
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
# Only do this for requests that are UPLOADING files; downloading files
# use the 'file' argument to point to a File ID.
upload_requests = ['files.upload']
files = None
if request in upload_requests:
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
Fix bug preventing API calls requiring a file ID
|
Fix bug preventing API calls requiring a file ID
For example, an API call to files.info takes a file ID argument named
"file", which was stripped out by this call. Currently, there is only
one request type that accepts file data (files.upload). Every other use
of 'file' is an ID that aught to be contained in the request.
|
Python
|
mit
|
slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient
|
python
|
## Code Before:
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
## Instruction:
Fix bug preventing API calls requiring a file ID
For example, an API call to files.info takes a file ID argument named
"file", which was stripped out by this call. Currently, there is only
one request type that accepts file data (files.upload). Every other use
of 'file' is an ID that aught to be contained in the request.
## Code After:
import json
import requests
import six
class SlackRequest(object):
@staticmethod
def do(token, request="?", post_data=None, domain="slack.com"):
'''
Perform a POST request to the Slack Web API
Args:
token (str): your authentication token
request (str): the method to call from the Slack API. For example: 'channels.list'
post_data (dict): key/value arguments to pass for the request. For example:
{'channel': 'CABC12345'}
domain (str): if for some reason you want to send your request to something other
than slack.com
'''
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
# Only do this for requests that are UPLOADING files; downloading files
# use the 'file' argument to point to a File ID.
upload_requests = ['files.upload']
files = None
if request in upload_requests:
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
post_data[k] = json.dumps(v)
url = 'https://{0}/api/{1}'.format(domain, request)
post_data['token'] = token
return requests.post(url, data=post_data, files=files)
|
# ... existing code ...
post_data = post_data or {}
# Pull file out so it isn't JSON encoded like normal fields.
# Only do this for requests that are UPLOADING files; downloading files
# use the 'file' argument to point to a File ID.
upload_requests = ['files.upload']
files = None
if request in upload_requests:
files = {'file': post_data.pop('file')} if 'file' in post_data else None
for k, v in six.iteritems(post_data):
if not isinstance(v, six.string_types):
# ... rest of the code ...
|
5d1fe61d152d2c5544982322a9f156809ea267f0
|
main.py
|
main.py
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
Fix crashes from misc. events
|
Fix crashes from misc. events
|
Python
|
mit
|
Spferical/slack-megahal,Spferical/matrix-chatbot,Spferical/matrix-chatbot,Spferical/matrix-megahal
|
python
|
## Code Before:
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if event['type'] == 'message':
message = event['text']
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
## Instruction:
Fix crashes from misc. events
## Code After:
from __future__ import print_function
import time
from slackclient import SlackClient
import mh_python as mh
import argparse
import random
def main():
parser = argparse.ArgumentParser(
description="Slack chatbot using MegaHAL")
parser.add_argument(
"-t", "--token", type=str, help="Slack token", required=True)
args = vars(parser.parse_args())
token = args['token']
sc = SlackClient(token)
mh.initbrain()
try:
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
print("Replying: %s" % reply)
sc.rtm_send_message(event['channel'], reply)
else:
mh.learn(message)
time.sleep(1)
else:
print("Connection Failed, invalid token?")
finally:
mh.cleanup()
if __name__ == '__main__':
main()
|
# ... existing code ...
if sc.rtm_connect():
while True:
for event in sc.rtm_read():
if 'type' in event and event['type'] == 'message' \
and 'text' in event:
message = event['text'].encode('ascii', 'ignore')
print("Handling message: %s" % message)
if random.random() < 0.1:
reply = mh.doreply(message)
# ... rest of the code ...
|
c9c2c1696c1655ab178908e9f534d6c60da09539
|
SVPullToRefresh/SVPullToRefresh.h
|
SVPullToRefresh/SVPullToRefresh.h
|
//
// SVPullToRefresh.h
// SVPullToRefreshDemo
//
// Created by Sam Vermette on 23.04.12.
// Copyright (c) 2012 samvermette.com. All rights reserved.
//
// https://github.com/samvermette/SVPullToRefresh
//
// this header file is provided for backwards compatibility and will be removed in the future
// here's how you should import SVPullToRefresh now:
#import "UIScrollView+SVPullToRefresh.h"
#import "UIScrollView+SVInfiniteScrolling.h"
|
//
// SVPullToRefresh.h
// SVPullToRefreshDemo
//
// Created by Sam Vermette on 23.04.12.
// Copyright (c) 2012 samvermette.com. All rights reserved.
//
// https://github.com/samvermette/SVPullToRefresh
//
// this header file is provided for backwards compatibility and will be removed in the future
// here's how you should import SVPullToRefresh now:
#import "UIScrollView+SVPullToRefresh.h"
#import "UIScrollView+SVInfiniteScrolling.h"
#import "SVPullToRefreshLoadingView.h"
#import "SVInfiniteScrollingLoadingView.h"
|
Add loading view to SVPUllToRefresh header
|
Add loading view to SVPUllToRefresh header
|
C
|
mit
|
csutanyu/SVPullToRefresh
|
c
|
## Code Before:
//
// SVPullToRefresh.h
// SVPullToRefreshDemo
//
// Created by Sam Vermette on 23.04.12.
// Copyright (c) 2012 samvermette.com. All rights reserved.
//
// https://github.com/samvermette/SVPullToRefresh
//
// this header file is provided for backwards compatibility and will be removed in the future
// here's how you should import SVPullToRefresh now:
#import "UIScrollView+SVPullToRefresh.h"
#import "UIScrollView+SVInfiniteScrolling.h"
## Instruction:
Add loading view to SVPUllToRefresh header
## Code After:
//
// SVPullToRefresh.h
// SVPullToRefreshDemo
//
// Created by Sam Vermette on 23.04.12.
// Copyright (c) 2012 samvermette.com. All rights reserved.
//
// https://github.com/samvermette/SVPullToRefresh
//
// this header file is provided for backwards compatibility and will be removed in the future
// here's how you should import SVPullToRefresh now:
#import "UIScrollView+SVPullToRefresh.h"
#import "UIScrollView+SVInfiniteScrolling.h"
#import "SVPullToRefreshLoadingView.h"
#import "SVInfiniteScrollingLoadingView.h"
|
# ... existing code ...
#import "UIScrollView+SVPullToRefresh.h"
#import "UIScrollView+SVInfiniteScrolling.h"
#import "SVPullToRefreshLoadingView.h"
#import "SVInfiniteScrollingLoadingView.h"
# ... rest of the code ...
|
96cc367c1330cec510bce8d0463f3ef27dc5c271
|
Shooter.java
|
Shooter.java
|
package ftc8390.vv;
import com.qualcomm.robotcore.hardware.HardwareMap;
/**
* Created by jmgu3 on 11/7/2016.
*/
public class Shooter {
public void init(HardwareMap hardwareMap) {
}
}
|
package ftc8390.vv;
import com.qualcomm.robotcore.hardware.HardwareMap;
/**
* Created by jmgu3 on 11/7/2016.
*/
public class Shooter {
public void init(HardwareMap hardwareMap) {
// MAKE SURE TO PUT SHOOTER MOTORS INTO FLOAT MODE SO THEY DON'T BREAK!!!
}
}
|
Comment about FLOAT mode for shooter motors
|
Comment about FLOAT mode for shooter motors
|
Java
|
mit
|
FTC8390/vv
|
java
|
## Code Before:
package ftc8390.vv;
import com.qualcomm.robotcore.hardware.HardwareMap;
/**
* Created by jmgu3 on 11/7/2016.
*/
public class Shooter {
public void init(HardwareMap hardwareMap) {
}
}
## Instruction:
Comment about FLOAT mode for shooter motors
## Code After:
package ftc8390.vv;
import com.qualcomm.robotcore.hardware.HardwareMap;
/**
* Created by jmgu3 on 11/7/2016.
*/
public class Shooter {
public void init(HardwareMap hardwareMap) {
// MAKE SURE TO PUT SHOOTER MOTORS INTO FLOAT MODE SO THEY DON'T BREAK!!!
}
}
|
...
public class Shooter {
public void init(HardwareMap hardwareMap) {
// MAKE SURE TO PUT SHOOTER MOTORS INTO FLOAT MODE SO THEY DON'T BREAK!!!
}
}
...
|
b466e0c41629575e0661aff1ba37c7056a732e0a
|
magicbot/__init__.py
|
magicbot/__init__.py
|
from .magicrobot import MagicRobot
from .magic_tunable import tunable
from .magic_reset import will_reset_to
from .state_machine import AutonomousStateMachine, StateMachine, state, timed_state
|
from .magicrobot import MagicRobot
from .magic_tunable import tunable
from .magic_reset import will_reset_to
from .state_machine import AutonomousStateMachine, StateMachine, default_state, state, timed_state
|
Add default_state to the magicbot exports
|
Add default_state to the magicbot exports
|
Python
|
bsd-3-clause
|
Twinters007/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities
|
python
|
## Code Before:
from .magicrobot import MagicRobot
from .magic_tunable import tunable
from .magic_reset import will_reset_to
from .state_machine import AutonomousStateMachine, StateMachine, state, timed_state
## Instruction:
Add default_state to the magicbot exports
## Code After:
from .magicrobot import MagicRobot
from .magic_tunable import tunable
from .magic_reset import will_reset_to
from .state_machine import AutonomousStateMachine, StateMachine, default_state, state, timed_state
|
# ... existing code ...
from .magic_tunable import tunable
from .magic_reset import will_reset_to
from .state_machine import AutonomousStateMachine, StateMachine, default_state, state, timed_state
# ... rest of the code ...
|
b7d35893186564bf8c4e706e5d05df06b23d2dc7
|
test/CodeGen/functions.c
|
test/CodeGen/functions.c
|
// RUN: %clang_cc1 %s -emit-llvm -o - | FileCheck %s
int g();
int foo(int i) {
return g(i);
}
int g(int i) {
return g(i);
}
// rdar://6110827
typedef void T(void);
void test3(T f) {
f();
}
int a(int);
int a() {return 1;}
// RUN: grep 'define void @f0()' %t
void f0() {}
void f1();
// RUN: grep 'call void @f1()' %t
void f2(void) {
f1(1, 2, 3);
}
// RUN: grep 'define void @f1()' %t
void f1() {}
// RUN: grep 'define .* @f3' %t | not grep -F '...'
struct foo { int X, Y, Z; } f3() {
while (1) {}
}
// PR4423 - This shouldn't crash in codegen
void f4() {}
void f5() { f4(42); }
// Qualifiers on parameter types shouldn't make a difference.
static void f6(const float f, const float g) {
}
void f7(float f, float g) {
f6(f, g);
// CHECK: define void @f7(float{{.*}}, float{{.*}})
// CHECK: call void @f6(float{{.*}}, float{{.*}})
}
|
// RUN: %clang_cc1 %s -emit-llvm -o - -verify | FileCheck %s
int g();
int foo(int i) {
return g(i);
}
int g(int i) {
return g(i);
}
// rdar://6110827
typedef void T(void);
void test3(T f) {
f();
}
int a(int);
int a() {return 1;}
void f0() {}
// CHECK: define void @f0()
void f1();
void f2(void) {
// CHECK: call void @f1()
f1(1, 2, 3);
}
// CHECK: define void @f1()
void f1() {}
// CHECK: define {{.*}} @f3()
struct foo { int X, Y, Z; } f3() {
while (1) {}
}
// PR4423 - This shouldn't crash in codegen
void f4() {}
void f5() { f4(42); } //expected-warning {{too many arguments}}
// Qualifiers on parameter types shouldn't make a difference.
static void f6(const float f, const float g) {
}
void f7(float f, float g) {
f6(f, g);
// CHECK: define void @f7(float{{.*}}, float{{.*}})
// CHECK: call void @f6(float{{.*}}, float{{.*}})
}
|
Fix test case and convert fully to FileCheck.
|
Fix test case and convert fully to FileCheck.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@97032 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang_cc1 %s -emit-llvm -o - | FileCheck %s
int g();
int foo(int i) {
return g(i);
}
int g(int i) {
return g(i);
}
// rdar://6110827
typedef void T(void);
void test3(T f) {
f();
}
int a(int);
int a() {return 1;}
// RUN: grep 'define void @f0()' %t
void f0() {}
void f1();
// RUN: grep 'call void @f1()' %t
void f2(void) {
f1(1, 2, 3);
}
// RUN: grep 'define void @f1()' %t
void f1() {}
// RUN: grep 'define .* @f3' %t | not grep -F '...'
struct foo { int X, Y, Z; } f3() {
while (1) {}
}
// PR4423 - This shouldn't crash in codegen
void f4() {}
void f5() { f4(42); }
// Qualifiers on parameter types shouldn't make a difference.
static void f6(const float f, const float g) {
}
void f7(float f, float g) {
f6(f, g);
// CHECK: define void @f7(float{{.*}}, float{{.*}})
// CHECK: call void @f6(float{{.*}}, float{{.*}})
}
## Instruction:
Fix test case and convert fully to FileCheck.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@97032 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 %s -emit-llvm -o - -verify | FileCheck %s
int g();
int foo(int i) {
return g(i);
}
int g(int i) {
return g(i);
}
// rdar://6110827
typedef void T(void);
void test3(T f) {
f();
}
int a(int);
int a() {return 1;}
void f0() {}
// CHECK: define void @f0()
void f1();
void f2(void) {
// CHECK: call void @f1()
f1(1, 2, 3);
}
// CHECK: define void @f1()
void f1() {}
// CHECK: define {{.*}} @f3()
struct foo { int X, Y, Z; } f3() {
while (1) {}
}
// PR4423 - This shouldn't crash in codegen
void f4() {}
void f5() { f4(42); } //expected-warning {{too many arguments}}
// Qualifiers on parameter types shouldn't make a difference.
static void f6(const float f, const float g) {
}
void f7(float f, float g) {
f6(f, g);
// CHECK: define void @f7(float{{.*}}, float{{.*}})
// CHECK: call void @f6(float{{.*}}, float{{.*}})
}
|
// ... existing code ...
// RUN: %clang_cc1 %s -emit-llvm -o - -verify | FileCheck %s
int g();
// ... modified code ...
int a(int);
int a() {return 1;}
void f0() {}
// CHECK: define void @f0()
void f1();
void f2(void) {
// CHECK: call void @f1()
f1(1, 2, 3);
}
// CHECK: define void @f1()
void f1() {}
// CHECK: define {{.*}} @f3()
struct foo { int X, Y, Z; } f3() {
while (1) {}
}
...
// PR4423 - This shouldn't crash in codegen
void f4() {}
void f5() { f4(42); } //expected-warning {{too many arguments}}
// Qualifiers on parameter types shouldn't make a difference.
static void f6(const float f, const float g) {
// ... rest of the code ...
|
ce8c5ceca5b929280cd6f03a919b0a1f7ab0f3ea
|
buffer/src/main/java/org/nguyenhuy/buffer/job/Job.java
|
buffer/src/main/java/org/nguyenhuy/buffer/job/Job.java
|
package org.nguyenhuy.buffer.job;
import android.os.Handler;
import com.path.android.jobqueue.Params;
import com.squareup.otto.Bus;
import javax.inject.Inject;
/**
* Created by nguyenthanhhuy on 1/14/14.
*/
public abstract class Job extends com.path.android.jobqueue.Job {
@Inject
transient Bus bus;
@Inject
transient Handler mainHandler;
protected Job(Params params) {
super(params);
}
}
|
package org.nguyenhuy.buffer.job;
import android.os.Handler;
import com.path.android.jobqueue.Params;
import com.squareup.otto.Bus;
import javax.inject.Inject;
/**
* Created by nguyenthanhhuy on 1/14/14.
*/
public abstract class Job extends com.path.android.jobqueue.Job {
private static final int DEFAULT_RETRY_LIMIT = 3;
@Inject
transient Bus bus;
@Inject
transient Handler mainHandler;
protected Job(Params params) {
super(params);
}
@Override
protected int getRetryLimit() {
return DEFAULT_RETRY_LIMIT;
}
}
|
Change retry limit from 20 down to 3, to fail fast.
|
Change retry limit from 20 down to 3, to fail fast.
|
Java
|
mit
|
nguyenhuy/buffer,nguyenhuy/buffer
|
java
|
## Code Before:
package org.nguyenhuy.buffer.job;
import android.os.Handler;
import com.path.android.jobqueue.Params;
import com.squareup.otto.Bus;
import javax.inject.Inject;
/**
* Created by nguyenthanhhuy on 1/14/14.
*/
public abstract class Job extends com.path.android.jobqueue.Job {
@Inject
transient Bus bus;
@Inject
transient Handler mainHandler;
protected Job(Params params) {
super(params);
}
}
## Instruction:
Change retry limit from 20 down to 3, to fail fast.
## Code After:
package org.nguyenhuy.buffer.job;
import android.os.Handler;
import com.path.android.jobqueue.Params;
import com.squareup.otto.Bus;
import javax.inject.Inject;
/**
* Created by nguyenthanhhuy on 1/14/14.
*/
public abstract class Job extends com.path.android.jobqueue.Job {
private static final int DEFAULT_RETRY_LIMIT = 3;
@Inject
transient Bus bus;
@Inject
transient Handler mainHandler;
protected Job(Params params) {
super(params);
}
@Override
protected int getRetryLimit() {
return DEFAULT_RETRY_LIMIT;
}
}
|
# ... existing code ...
* Created by nguyenthanhhuy on 1/14/14.
*/
public abstract class Job extends com.path.android.jobqueue.Job {
private static final int DEFAULT_RETRY_LIMIT = 3;
@Inject
transient Bus bus;
@Inject
# ... modified code ...
protected Job(Params params) {
super(params);
}
@Override
protected int getRetryLimit() {
return DEFAULT_RETRY_LIMIT;
}
}
# ... rest of the code ...
|
246f39a6ac5dab8300600e5776da0e0b02bc6706
|
Client/src/main/java/org/xdi/oxauth/client/fido/u2f/RegistrationRequestService.java
|
Client/src/main/java/org/xdi/oxauth/client/fido/u2f/RegistrationRequestService.java
|
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.client.fido.u2f;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterRequestMessage;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterStatus;
import javax.ws.rs.*;
/**
* Еhe endpoint allows to start and finish U2F registration process
*
* @author Yuriy Movchan
* @version August 9, 2017
*/
public interface RegistrationRequestService {
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId);
@POST
@Produces({"application/json"})
public RegisterStatus finishRegistration(@FormParam("username") String userName, @FormParam("tokenResponse") String registerResponseString);
}
|
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.client.fido.u2f;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterRequestMessage;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterStatus;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
/**
* Еhe endpoint allows to start and finish U2F registration process
*
* @author Yuriy Movchan
* @version August 9, 2017
*/
public interface RegistrationRequestService {
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId);
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId, @QueryParam("enrollment_code") String enrollmentCode);
@POST
@Produces({"application/json"})
public RegisterStatus finishRegistration(@FormParam("username") String userName, @FormParam("tokenResponse") String registerResponseString);
}
|
Add enrollment_code to U2F client
|
Add enrollment_code to U2F client
|
Java
|
mit
|
madumlao/oxAuth,madumlao/oxAuth,GluuFederation/oxAuth,madumlao/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,madumlao/oxAuth,madumlao/oxAuth,GluuFederation/oxAuth
|
java
|
## Code Before:
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.client.fido.u2f;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterRequestMessage;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterStatus;
import javax.ws.rs.*;
/**
* Еhe endpoint allows to start and finish U2F registration process
*
* @author Yuriy Movchan
* @version August 9, 2017
*/
public interface RegistrationRequestService {
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId);
@POST
@Produces({"application/json"})
public RegisterStatus finishRegistration(@FormParam("username") String userName, @FormParam("tokenResponse") String registerResponseString);
}
## Instruction:
Add enrollment_code to U2F client
## Code After:
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.client.fido.u2f;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterRequestMessage;
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterStatus;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
/**
* Еhe endpoint allows to start and finish U2F registration process
*
* @author Yuriy Movchan
* @version August 9, 2017
*/
public interface RegistrationRequestService {
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId);
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId, @QueryParam("enrollment_code") String enrollmentCode);
@POST
@Produces({"application/json"})
public RegisterStatus finishRegistration(@FormParam("username") String userName, @FormParam("tokenResponse") String registerResponseString);
}
|
# ... existing code ...
import org.xdi.oxauth.model.fido.u2f.protocol.RegisterStatus;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
/**
* Еhe endpoint allows to start and finish U2F registration process
# ... modified code ...
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId);
@GET
@Produces({"application/json"})
public RegisterRequestMessage startRegistration(@QueryParam("username") String userName, @QueryParam("application") String appId, @QueryParam("session_id") String sessionId, @QueryParam("enrollment_code") String enrollmentCode);
@POST
@Produces({"application/json"})
public RegisterStatus finishRegistration(@FormParam("username") String userName, @FormParam("tokenResponse") String registerResponseString);
# ... rest of the code ...
|
ba42df4296a02396e823ee9692fb84eb0deb8b7c
|
corehq/messaging/smsbackends/start_enterprise/views.py
|
corehq/messaging/smsbackends/start_enterprise/views.py
|
from __future__ import absolute_import
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
Add logging to delivery receipt view
|
Add logging to delivery receipt view
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
python
|
## Code Before:
from __future__ import absolute_import
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
## Instruction:
Add logging to delivery receipt view
## Code After:
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
StartEnterpriseDeliveryReceipt,
)
from datetime import datetime
from django.http import HttpResponse, HttpResponseBadRequest
class StartEnterpriseDeliveryReceiptView(IncomingBackendView):
urlname = 'start_enterprise_dlr'
@property
def backend_class(self):
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
return HttpResponseBadRequest("Missing 'msgid'")
message_id = message_id.strip()
try:
dlr = StartEnterpriseDeliveryReceipt.objects.get(message_id=message_id)
except StartEnterpriseDeliveryReceipt.DoesNotExist:
dlr = None
if dlr:
dlr.received_on = datetime.utcnow()
dlr.info = request.GET.dict()
dlr.save()
# Based on the documentation, a response of "1" acknowledges receipt of the DLR
return HttpResponse("1")
|
// ... existing code ...
from __future__ import absolute_import
import logging
from corehq.apps.sms.views import IncomingBackendView
from corehq.messaging.smsbackends.start_enterprise.models import (
StartEnterpriseBackend,
// ... modified code ...
return StartEnterpriseBackend
def get(self, request, api_key, *args, **kwargs):
logging.info("Received Start Enterprise delivery receipt with items: %s" % request.GET.dict().keys())
message_id = request.GET.get('msgid')
if not message_id:
// ... rest of the code ...
|
ce29f011a72bf695c9b0840ad4c121f85c9fcad1
|
mica/stats/tests/test_guide_stats.py
|
mica/stats/tests/test_guide_stats.py
|
import tempfile
import os
from .. import guide_stats
def test_calc_stats():
guide_stats.calc_stats(17210)
def test_make_gui_stats():
"""
Save the guide stats for one obsid into a newly-created table
"""
# Get a temporary file, but then delete it, because _save_acq_stats will only
# make a new table if the supplied file doesn't exist
fh, fn = tempfile.mkstemp(suffix='.h5')
os.unlink(fn)
guide_stats.TABLE_FILE = fn
obsid = 20001
obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid)
t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp)
guide_stats._save_gui_stats(t)
os.unlink(fn)
|
import tempfile
import os
from .. import guide_stats
def test_calc_stats():
guide_stats.calc_stats(17210)
def test_calc_stats_with_bright_trans():
s = guide_stats.calc_stats(17472)
# Assert that the std on the slot 7 residuals are reasonable
# even in this obsid that had a transition to BRIT
assert s[1][7]['dr_std'] < 1
def test_make_gui_stats():
"""
Save the guide stats for one obsid into a newly-created table
"""
# Get a temporary file, but then delete it, because _save_acq_stats will only
# make a new table if the supplied file doesn't exist
fh, fn = tempfile.mkstemp(suffix='.h5')
os.unlink(fn)
guide_stats.TABLE_FILE = fn
obsid = 20001
obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid)
t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp)
guide_stats._save_gui_stats(t)
os.unlink(fn)
|
Add test to confirm more reasonable residual std on one obsid/slot
|
Add test to confirm more reasonable residual std on one obsid/slot
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
python
|
## Code Before:
import tempfile
import os
from .. import guide_stats
def test_calc_stats():
guide_stats.calc_stats(17210)
def test_make_gui_stats():
"""
Save the guide stats for one obsid into a newly-created table
"""
# Get a temporary file, but then delete it, because _save_acq_stats will only
# make a new table if the supplied file doesn't exist
fh, fn = tempfile.mkstemp(suffix='.h5')
os.unlink(fn)
guide_stats.TABLE_FILE = fn
obsid = 20001
obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid)
t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp)
guide_stats._save_gui_stats(t)
os.unlink(fn)
## Instruction:
Add test to confirm more reasonable residual std on one obsid/slot
## Code After:
import tempfile
import os
from .. import guide_stats
def test_calc_stats():
guide_stats.calc_stats(17210)
def test_calc_stats_with_bright_trans():
s = guide_stats.calc_stats(17472)
# Assert that the std on the slot 7 residuals are reasonable
# even in this obsid that had a transition to BRIT
assert s[1][7]['dr_std'] < 1
def test_make_gui_stats():
"""
Save the guide stats for one obsid into a newly-created table
"""
# Get a temporary file, but then delete it, because _save_acq_stats will only
# make a new table if the supplied file doesn't exist
fh, fn = tempfile.mkstemp(suffix='.h5')
os.unlink(fn)
guide_stats.TABLE_FILE = fn
obsid = 20001
obsid_info, gui, star_info, catalog, temp = guide_stats.calc_stats(obsid)
t = guide_stats.table_gui_stats(obsid_info, gui, star_info, catalog, temp)
guide_stats._save_gui_stats(t)
os.unlink(fn)
|
...
def test_calc_stats():
guide_stats.calc_stats(17210)
def test_calc_stats_with_bright_trans():
s = guide_stats.calc_stats(17472)
# Assert that the std on the slot 7 residuals are reasonable
# even in this obsid that had a transition to BRIT
assert s[1][7]['dr_std'] < 1
def test_make_gui_stats():
"""
...
|
efa32f814e5b50408668537f40fd84abddd80235
|
libraries/core/src/test/java/com/paritytrading/nassau/binaryfile/BinaryFILEReaderTest.java
|
libraries/core/src/test/java/com/paritytrading/nassau/binaryfile/BinaryFILEReaderTest.java
|
package com.paritytrading.nassau.binaryfile;
import static com.paritytrading.nassau.binaryfile.BinaryFILEStatus.*;
import static java.util.Arrays.*;
import static org.junit.Assert.*;
import com.paritytrading.nassau.Messages;
import com.paritytrading.nassau.Strings;
import java.io.InputStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class BinaryFILEReaderTest {
private InputStream stream;
private Messages<String> messages;
@Before
public void setUp() throws Exception {
stream = getClass().getResourceAsStream("/binaryfile.dat");
messages = new Messages<>(Strings.MESSAGE_PARSER);
}
@After
public void tearDown() throws Exception {
stream.close();
}
@Test
public void readStream() throws Exception {
BinaryFILEReader reader = new BinaryFILEReader(stream, messages);
while (reader.read() >= 0);
assertEquals(asList("foo", "bar", "baz", "quux", ""), messages.collect());
}
@Test
public void readStreamWithStatusListener() throws Exception {
BinaryFILEStatus status = new BinaryFILEStatus();
BinaryFILEStatusParser parser = new BinaryFILEStatusParser(messages, status);
BinaryFILEReader reader = new BinaryFILEReader(stream, parser);
while (reader.read() >= 0);
assertEquals(asList("foo", "bar", "baz", "quux"), messages.collect());
assertEquals(asList(new EndOfSession()), status.collect());
}
}
|
package com.paritytrading.nassau.binaryfile;
import static com.paritytrading.nassau.binaryfile.BinaryFILEStatus.*;
import static java.util.Arrays.*;
import static org.junit.Assert.*;
import com.paritytrading.nassau.Messages;
import com.paritytrading.nassau.Strings;
import java.io.InputStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class BinaryFILEReaderTest {
private InputStream stream;
private Messages<String> messages;
@Before
public void setUp() throws Exception {
stream = getClass().getResourceAsStream("/binaryfile.dat");
messages = new Messages<>(Strings.MESSAGE_PARSER);
}
@After
public void tearDown() throws Exception {
stream.close();
}
@Test
public void readStream() throws Exception {
try (BinaryFILEReader reader = new BinaryFILEReader(stream, messages)) {
while (reader.read() >= 0);
}
assertEquals(asList("foo", "bar", "baz", "quux", ""), messages.collect());
}
@Test
public void readStreamWithStatusListener() throws Exception {
BinaryFILEStatus status = new BinaryFILEStatus();
BinaryFILEStatusParser parser = new BinaryFILEStatusParser(messages, status);
try (BinaryFILEReader reader = new BinaryFILEReader(stream, parser)) {
while (reader.read() >= 0);
}
assertEquals(asList("foo", "bar", "baz", "quux"), messages.collect());
assertEquals(asList(new EndOfSession()), status.collect());
}
}
|
Use try-with-resources to close as application code should.
|
Use try-with-resources to close as application code should.
|
Java
|
apache-2.0
|
paritytrading/nassau,pmcs/nassau,paritytrading/nassau,pmcs/nassau
|
java
|
## Code Before:
package com.paritytrading.nassau.binaryfile;
import static com.paritytrading.nassau.binaryfile.BinaryFILEStatus.*;
import static java.util.Arrays.*;
import static org.junit.Assert.*;
import com.paritytrading.nassau.Messages;
import com.paritytrading.nassau.Strings;
import java.io.InputStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class BinaryFILEReaderTest {
private InputStream stream;
private Messages<String> messages;
@Before
public void setUp() throws Exception {
stream = getClass().getResourceAsStream("/binaryfile.dat");
messages = new Messages<>(Strings.MESSAGE_PARSER);
}
@After
public void tearDown() throws Exception {
stream.close();
}
@Test
public void readStream() throws Exception {
BinaryFILEReader reader = new BinaryFILEReader(stream, messages);
while (reader.read() >= 0);
assertEquals(asList("foo", "bar", "baz", "quux", ""), messages.collect());
}
@Test
public void readStreamWithStatusListener() throws Exception {
BinaryFILEStatus status = new BinaryFILEStatus();
BinaryFILEStatusParser parser = new BinaryFILEStatusParser(messages, status);
BinaryFILEReader reader = new BinaryFILEReader(stream, parser);
while (reader.read() >= 0);
assertEquals(asList("foo", "bar", "baz", "quux"), messages.collect());
assertEquals(asList(new EndOfSession()), status.collect());
}
}
## Instruction:
Use try-with-resources to close as application code should.
## Code After:
package com.paritytrading.nassau.binaryfile;
import static com.paritytrading.nassau.binaryfile.BinaryFILEStatus.*;
import static java.util.Arrays.*;
import static org.junit.Assert.*;
import com.paritytrading.nassau.Messages;
import com.paritytrading.nassau.Strings;
import java.io.InputStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class BinaryFILEReaderTest {
private InputStream stream;
private Messages<String> messages;
@Before
public void setUp() throws Exception {
stream = getClass().getResourceAsStream("/binaryfile.dat");
messages = new Messages<>(Strings.MESSAGE_PARSER);
}
@After
public void tearDown() throws Exception {
stream.close();
}
@Test
public void readStream() throws Exception {
try (BinaryFILEReader reader = new BinaryFILEReader(stream, messages)) {
while (reader.read() >= 0);
}
assertEquals(asList("foo", "bar", "baz", "quux", ""), messages.collect());
}
@Test
public void readStreamWithStatusListener() throws Exception {
BinaryFILEStatus status = new BinaryFILEStatus();
BinaryFILEStatusParser parser = new BinaryFILEStatusParser(messages, status);
try (BinaryFILEReader reader = new BinaryFILEReader(stream, parser)) {
while (reader.read() >= 0);
}
assertEquals(asList("foo", "bar", "baz", "quux"), messages.collect());
assertEquals(asList(new EndOfSession()), status.collect());
}
}
|
# ... existing code ...
@Test
public void readStream() throws Exception {
try (BinaryFILEReader reader = new BinaryFILEReader(stream, messages)) {
while (reader.read() >= 0);
}
assertEquals(asList("foo", "bar", "baz", "quux", ""), messages.collect());
}
# ... modified code ...
BinaryFILEStatusParser parser = new BinaryFILEStatusParser(messages, status);
try (BinaryFILEReader reader = new BinaryFILEReader(stream, parser)) {
while (reader.read() >= 0);
}
assertEquals(asList("foo", "bar", "baz", "quux"), messages.collect());
assertEquals(asList(new EndOfSession()), status.collect());
# ... rest of the code ...
|
1e001eb11938bd5c613e655f86943167cd945d50
|
local_sync_client.py
|
local_sync_client.py
|
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
|
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
|
Fix bug which caused put_object in LocalSyncClient to fail on create
|
Fix bug which caused put_object in LocalSyncClient to fail on create
|
Python
|
mit
|
MichaelAquilina/s3backup,MichaelAquilina/s3backup
|
python
|
## Code Before:
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
## Instruction:
Fix bug which caused put_object in LocalSyncClient to fail on create
## Code After:
import os
class LocalSyncClient(object):
def __init__(self, local_dir):
self.local_dir = local_dir
def get_object_timestamp(self, key):
object_path = os.path.join(self.local_dir, key)
if os.path.exists(object_path):
return os.stat(object_path).st_mtime
else:
return None
def update_sync_index(self):
pass
def keys(self):
for item in os.listdir(self.local_dir):
if item.startswith('.'):
continue
if os.path.isfile(os.path.join(self.local_dir, item)):
yield item
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
|
# ... existing code ...
def put_object(self, key, fp, timestamp):
object_path = os.path.join(self.local_dir, key)
object_stat = None
if os.path.exists(object_path):
object_stat = os.stat(object_path)
with open(object_path, 'wb') as fp2:
fp2.write(fp.read())
if object_stat is not None:
os.utime(object_path, (object_stat.st_atime, timestamp))
def get_object(self, key):
return open(os.path.join(self.local_dir, key), 'rb')
# ... rest of the code ...
|
82178af68dde7754cade01e9d5f092c9889ab957
|
tomorrow_corrector/bot.py
|
tomorrow_corrector/bot.py
|
import praw, time
# replace with your username/password
username, password = USERNAME, PASSWORD
r = praw.Reddit(user_agent='A "tomorrow"-misspelling corrector by /u/tomorrow_corrector')
r.login(username, password, disable_warning=True)
def run_bot():
'''Check /r/all for mispellings in comments and reply to them.'''
subreddit = r.get_subreddit('all')
comments = subreddit.get_comments(limit=25)
for comment in comments:
while True:
run_bot()
time.sleep(30)
|
import praw, time
# replace with your username/password
username, password = USERNAME, PASSWORD
r = praw.Reddit(user_agent='A "tomorrow"-misspelling corrector by /u/tomorrow_corrector')
r.login(username, password, disable_warning=True)
misspellings = ['tommorow', 'tommorrow', 'tomorow']
comment_cache = []
def run_bot():
'''Check /r/all for mispellings in comments and reply to them.'''
subreddit = r.get_subreddit('all')
comments = subreddit.get_comments(limit=25)
for comment in comments:
if any(string in comment.body.lower() for string in misspellings) and not comment.id in comment_cache:
comment.reply('I think you meant "tomorrow".')
comment_cache.append(comment.id)
while True:
run_bot()
time.sleep(30)
|
Check if comment body contains misspelling, reply if so
|
Check if comment body contains misspelling, reply if so
|
Python
|
mit
|
kshvmdn/reddit-bots
|
python
|
## Code Before:
import praw, time
# replace with your username/password
username, password = USERNAME, PASSWORD
r = praw.Reddit(user_agent='A "tomorrow"-misspelling corrector by /u/tomorrow_corrector')
r.login(username, password, disable_warning=True)
def run_bot():
'''Check /r/all for mispellings in comments and reply to them.'''
subreddit = r.get_subreddit('all')
comments = subreddit.get_comments(limit=25)
for comment in comments:
while True:
run_bot()
time.sleep(30)
## Instruction:
Check if comment body contains misspelling, reply if so
## Code After:
import praw, time
# replace with your username/password
username, password = USERNAME, PASSWORD
r = praw.Reddit(user_agent='A "tomorrow"-misspelling corrector by /u/tomorrow_corrector')
r.login(username, password, disable_warning=True)
misspellings = ['tommorow', 'tommorrow', 'tomorow']
comment_cache = []
def run_bot():
'''Check /r/all for mispellings in comments and reply to them.'''
subreddit = r.get_subreddit('all')
comments = subreddit.get_comments(limit=25)
for comment in comments:
if any(string in comment.body.lower() for string in misspellings) and not comment.id in comment_cache:
comment.reply('I think you meant "tomorrow".')
comment_cache.append(comment.id)
while True:
run_bot()
time.sleep(30)
|
# ... existing code ...
r = praw.Reddit(user_agent='A "tomorrow"-misspelling corrector by /u/tomorrow_corrector')
r.login(username, password, disable_warning=True)
misspellings = ['tommorow', 'tommorrow', 'tomorow']
comment_cache = []
def run_bot():
'''Check /r/all for mispellings in comments and reply to them.'''
# ... modified code ...
subreddit = r.get_subreddit('all')
comments = subreddit.get_comments(limit=25)
for comment in comments:
if any(string in comment.body.lower() for string in misspellings) and not comment.id in comment_cache:
comment.reply('I think you meant "tomorrow".')
comment_cache.append(comment.id)
while True:
run_bot()
# ... rest of the code ...
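The comment_cache introduced above is a plain list that grows for as long as the bot keeps polling. One possible refinement, purely illustrative and not part of this commit, is to cap the cache with a deque so old comment ids age out; the function name and the id value below are made up:

from collections import deque

comment_cache = deque(maxlen=1000)  # oldest entries are dropped automatically

def already_replied(comment_id):
    # membership tests and append work exactly as they do for the list version
    return comment_id in comment_cache

comment_cache.append('t1_example')      # id value is made up
print(already_replied('t1_example'))    # True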
|
153025aaa585e70d09509248ab18b214194759ae
|
tasks/static.py
|
tasks/static.py
|
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
|
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
Deal with the compass.rb -> config.rb change
|
Deal with the compass.rb -> config.rb change
|
Python
|
apache-2.0
|
techtonik/warehouse,techtonik/warehouse
|
python
|
## Code Before:
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
## Instruction:
Deal with the compass.rb -> config.rb change
## Code After:
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
...
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
...
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
...
|
b668e1ab2c2fbd973f3bf8865db79f0a7b37141f
|
tasks.py
|
tasks.py
|
import re
from invoke import task
def get_version():
return re.search(r"""__version__\s+=\s+(?P<quote>['"])(?P<version>.+?)(?P=quote)""", open('aiodns/__init__.py').read()).group('version')
@task
def release(c):
version = get_version()
c.run("git tag -a aiodns-{0} -m \"aiodns {0} release\"".format(version))
c.run("git push --tags")
c.run("python setup.py sdist")
c.run("twine upload -r pypi dist/aiodns-{0}*".format(version))
|
import re
from invoke import task
def get_version():
return re.search(r"""__version__\s+=\s+(?P<quote>['"])(?P<version>.+?)(?P=quote)""", open('aiodns/__init__.py').read()).group('version')
@task
def release(c):
version = get_version()
c.run("git tag -a aiodns-{0} -m \"aiodns {0} release\"".format(version))
c.run("git push --tags")
c.run("python setup.py sdist")
c.run("python setup.py bdist_wheel")
c.run("twine upload -r pypi dist/aiodns-{0}*".format(version))
|
Build universal wheels when releasing
|
Build universal wheels when releasing
|
Python
|
mit
|
saghul/aiodns
|
python
|
## Code Before:
import re
from invoke import task
def get_version():
return re.search(r"""__version__\s+=\s+(?P<quote>['"])(?P<version>.+?)(?P=quote)""", open('aiodns/__init__.py').read()).group('version')
@task
def release(c):
version = get_version()
c.run("git tag -a aiodns-{0} -m \"aiodns {0} release\"".format(version))
c.run("git push --tags")
c.run("python setup.py sdist")
c.run("twine upload -r pypi dist/aiodns-{0}*".format(version))
## Instruction:
Build universal wheels when releasing
## Code After:
import re
from invoke import task
def get_version():
return re.search(r"""__version__\s+=\s+(?P<quote>['"])(?P<version>.+?)(?P=quote)""", open('aiodns/__init__.py').read()).group('version')
@task
def release(c):
version = get_version()
c.run("git tag -a aiodns-{0} -m \"aiodns {0} release\"".format(version))
c.run("git push --tags")
c.run("python setup.py sdist")
c.run("python setup.py bdist_wheel")
c.run("twine upload -r pypi dist/aiodns-{0}*".format(version))
|
...
c.run("git push --tags")
c.run("python setup.py sdist")
c.run("python setup.py bdist_wheel")
c.run("twine upload -r pypi dist/aiodns-{0}*".format(version))
...
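The subject above mentions universal wheels, but the added command builds a wheel with whatever tags the project's own configuration produces. For reference only (this is not the repository's code, and the task name is invented), a py2.py3 universal wheel is usually requested explicitly with the bdist_wheel flag shown here or an equivalent setup.cfg entry:

from invoke import task

@task
def build_universal_wheel(c):
    # --universal asks bdist_wheel for a single py2.py3-tagged wheel;
    # omit it to fall back to the project's default wheel tags
    c.run("python setup.py bdist_wheel --universal")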
|
9ae919b1d81ca6e640dd96e6ef7aeaeba2fc2679
|
schedule/migrations/0011_event_calendar_not_null.py
|
schedule/migrations/0011_event_calendar_not_null.py
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
Sort imports per isort; fixes failure
|
Sort imports per isort; fixes failure
|
Python
|
bsd-3-clause
|
llazzaro/django-scheduler,llazzaro/django-scheduler,llazzaro/django-scheduler
|
python
|
## Code Before:
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
## Instruction:
Sort imports per isort; fixes failure
## Code After:
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
# ... existing code ...
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
# ... rest of the code ...
|
53dc86ace10f73832c0cbca9fcbc0389999a0e1c
|
hyperion/util/convenience.py
|
hyperion/util/convenience.py
|
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
|
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
Deal with the case of large radii for optically thin temperature radius
|
Deal with the case of large radii for optically thin temperature radius
|
Python
|
bsd-2-clause
|
hyperion-rt/hyperion,bluescarni/hyperion,hyperion-rt/hyperion,astrofrog/hyperion,astrofrog/hyperion,bluescarni/hyperion,hyperion-rt/hyperion
|
python
|
## Code Before:
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
return self.value * rstar \
* (1. - (1. - 2. * (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)) ** 2.) ** -0.5
## Instruction:
Deal with the case of large radii for optically thin temperature radius
## Code After:
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
self.temperature = temperature
self.value = value
def __mul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __rmul__(self, value):
return OptThinRadius(self.temperature, value=self.value * value)
def __str__(self):
return "%g times the dust sublimation radius" % self.n
def evaluate(self, star, dust):
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
|
# ... existing code ...
import numpy as np
class OptThinRadius(object):
def __init__(self, temperature, value=1.):
# ... modified code ...
rstar = star.radius
tstar = star.effective_temperature()
nu, fnu = star.total_spectrum()
x = (self.temperature / tstar) ** 4. \
* dust.kappa_planck_temperature(self.temperature) \
/ dust.kappa_planck_spectrum(nu, fnu)
if x < 0.001:
r = self.value * rstar / 2. / np.sqrt(x)
else:
r = self.value * rstar / np.sqrt(1. - (1. - 2. * x) ** 2.)
return r
# ... rest of the code ...
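The new branch for x < 0.001 follows from expanding the original expression: 1 - (1 - 2x)^2 = 4x - 4x^2, so as x approaches zero the radius value*rstar/sqrt(4x - 4x^2) tends to value*rstar/(2*sqrt(x)), and the closed form is vulnerable to floating-point cancellation there. A short numerical check of the two branches, in units of value*rstar and purely illustrative:

import numpy as np

for x in (1e-3, 1e-4, 1e-5):
    exact = 1.0 / np.sqrt(1.0 - (1.0 - 2.0 * x) ** 2)    # full expression
    approx = 1.0 / (2.0 * np.sqrt(x))                     # small-x branch
    print(x, exact, approx, abs(exact - approx) / exact)  # relative gap ~ x/2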
|
2c8351ff8691eb9ad3009d316d932528d6f5c57d
|
runtests.py
|
runtests.py
|
import sys
import os
import django
from django.conf import settings
from django.core.management import call_command
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
opts = {'INSTALLED_APPS': ['widget_tweaks']}
if django.VERSION[:2] < (1, 5):
opts['DATABASES'] = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':MEMORY:',
}
}
if django.VERSION[:2] >= (1, 10):
opts['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
},
]
settings.configure(**opts)
if django.VERSION[:2] >= (1, 7):
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks')
|
import sys
import os
import django
from django.conf import settings
from django.core.management import call_command
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
opts = {'INSTALLED_APPS': ['widget_tweaks']}
if django.VERSION[:2] < (1, 5):
opts['DATABASES'] = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':MEMORY:',
}
}
if django.VERSION[:2] >= (1, 10):
opts['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
},
]
settings.configure(**opts)
if django.VERSION[:2] >= (1, 7):
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks', verbosity=2)
|
Add more verbosity on test running
|
:lipstick: Add more verbosity on test running
|
Python
|
mit
|
kmike/django-widget-tweaks,daniboy/django-widget-tweaks
|
python
|
## Code Before:
import sys
import os
import django
from django.conf import settings
from django.core.management import call_command
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
opts = {'INSTALLED_APPS': ['widget_tweaks']}
if django.VERSION[:2] < (1, 5):
opts['DATABASES'] = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':MEMORY:',
}
}
if django.VERSION[:2] >= (1, 10):
opts['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
},
]
settings.configure(**opts)
if django.VERSION[:2] >= (1, 7):
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks')
## Instruction:
:lipstick: Add more verbosity on test running
## Code After:
import sys
import os
import django
from django.conf import settings
from django.core.management import call_command
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
opts = {'INSTALLED_APPS': ['widget_tweaks']}
if django.VERSION[:2] < (1, 5):
opts['DATABASES'] = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':MEMORY:',
}
}
if django.VERSION[:2] >= (1, 10):
opts['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
},
]
settings.configure(**opts)
if django.VERSION[:2] >= (1, 7):
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks', verbosity=2)
|
// ... existing code ...
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks', verbosity=2)
// ... rest of the code ...
|
76728fcba7671575053620da9e1e26aaa279547a
|
awx/main/notifications/webhook_backend.py
|
awx/main/notifications/webhook_backend.py
|
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
Set a user agent for the webhook if not provided
|
Set a user agent for the webhook if not provided
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx
|
python
|
## Code Before:
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
## Instruction:
Set a user agent for the webhook if not provided
## Code After:
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
# ... existing code ...
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
# ... modified code ...
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
# ... rest of the code ...
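The guard added above only fills in a User-Agent when the caller has not supplied one. The same defaulting can be expressed with dict.setdefault; the helper name and version string below are invented, and the sketch assumes headers is a dict, as the surrounding method already does:

def with_default_user_agent(headers, version):
    # setdefault only writes the key when the caller has not supplied one
    headers = dict(headers or {})
    headers.setdefault("User-Agent", "Tower {}".format(version))
    return headers

print(with_default_user_agent({}, "3.1.0"))
# {'User-Agent': 'Tower 3.1.0'}
print(with_default_user_agent({"User-Agent": "custom"}, "3.1.0"))
# {'User-Agent': 'custom'}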
|
bb6b6b46860f6e03abc4ac9c47751fe4309f0e17
|
md2pdf/core.py
|
md2pdf/core.py
|
from markdown2 import markdown, markdown_path
from weasyprint import HTML, CSS
from .exceptions import ValidationError
__title__ = 'md2pdf'
__version__ = '0.2.1'
__author__ = 'Julien Maupetit'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013 Julien Maupetit'
def md2pdf(pdf_file_path, md_content=None, md_file_path=None,
css_file_path=None):
"""
Convert markdown file to pdf with styles
"""
# Convert markdown to html
raw_html = ""
extras = ["cuddled-lists"]
if md_file_path:
raw_html = markdown_path(md_file_path, extras=extras)
elif md_content:
raw_html = markdown(md_content, extras=extras)
if not len(raw_html):
raise ValidationError('Input markdown seems empty')
# Weasyprint HTML object
html = HTML(string=raw_html)
# Get styles
css = []
if css_file_path:
css.append(CSS(filename=css_file_path))
# Generate PDF
html.write_pdf(pdf_file_path, stylesheets=css)
return
|
from markdown2 import markdown, markdown_path
from weasyprint import HTML, CSS
from .exceptions import ValidationError
__title__ = 'md2pdf'
__version__ = '0.2.1'
__author__ = 'Julien Maupetit'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013 Julien Maupetit'
def md2pdf(pdf_file_path, md_content=None, md_file_path=None,
css_file_path=None, base_url=None):
"""
Convert markdown file to pdf with styles
"""
# Convert markdown to html
raw_html = ""
extras = ["cuddled-lists"]
if md_file_path:
raw_html = markdown_path(md_file_path, extras=extras)
elif md_content:
raw_html = markdown(md_content, extras=extras)
if not len(raw_html):
raise ValidationError('Input markdown seems empty')
# Weasyprint HTML object
html = HTML(string=raw_html, base_url=base_url)
# Get styles
css = []
if css_file_path:
css.append(CSS(filename=css_file_path))
# Generate PDF
html.write_pdf(pdf_file_path, stylesheets=css)
return
|
Allow to add a base url to find media
|
Allow to add a base url to find media
|
Python
|
mit
|
jmaupetit/md2pdf
|
python
|
## Code Before:
from markdown2 import markdown, markdown_path
from weasyprint import HTML, CSS
from .exceptions import ValidationError
__title__ = 'md2pdf'
__version__ = '0.2.1'
__author__ = 'Julien Maupetit'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013 Julien Maupetit'
def md2pdf(pdf_file_path, md_content=None, md_file_path=None,
css_file_path=None):
"""
Convert markdown file to pdf with styles
"""
# Convert markdown to html
raw_html = ""
extras = ["cuddled-lists"]
if md_file_path:
raw_html = markdown_path(md_file_path, extras=extras)
elif md_content:
raw_html = markdown(md_content, extras=extras)
if not len(raw_html):
raise ValidationError('Input markdown seems empty')
# Weasyprint HTML object
html = HTML(string=raw_html)
# Get styles
css = []
if css_file_path:
css.append(CSS(filename=css_file_path))
# Generate PDF
html.write_pdf(pdf_file_path, stylesheets=css)
return
## Instruction:
Allow to add a base url to find media
## Code After:
from markdown2 import markdown, markdown_path
from weasyprint import HTML, CSS
from .exceptions import ValidationError
__title__ = 'md2pdf'
__version__ = '0.2.1'
__author__ = 'Julien Maupetit'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013 Julien Maupetit'
def md2pdf(pdf_file_path, md_content=None, md_file_path=None,
css_file_path=None, base_url=None):
"""
Convert markdown file to pdf with styles
"""
# Convert markdown to html
raw_html = ""
extras = ["cuddled-lists"]
if md_file_path:
raw_html = markdown_path(md_file_path, extras=extras)
elif md_content:
raw_html = markdown(md_content, extras=extras)
if not len(raw_html):
raise ValidationError('Input markdown seems empty')
# Weasyprint HTML object
html = HTML(string=raw_html, base_url=base_url)
# Get styles
css = []
if css_file_path:
css.append(CSS(filename=css_file_path))
# Generate PDF
html.write_pdf(pdf_file_path, stylesheets=css)
return
|
...
def md2pdf(pdf_file_path, md_content=None, md_file_path=None,
css_file_path=None, base_url=None):
"""
Convert markdown file to pdf with styles
"""
...
raise ValidationError('Input markdown seems empty')
# Weasyprint HTML object
html = HTML(string=raw_html, base_url=base_url)
# Get styles
css = []
...
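A usage sketch for the new parameter, with file names, content and directory made up for illustration: passing base_url lets relative media references in the markdown resolve against a known directory.

from md2pdf.core import md2pdf

# The relative image path below is resolved against base_url by WeasyPrint.
md2pdf(
    "report.pdf",
    md_content="# Report\n\n",
    css_file_path="print.css",
    base_url="/home/user/report-assets/",
)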
|
522e8d25fc55768dea4fbcfc35dc1fb63e651e6b
|
nakadi-java-client/src/main/java/nakadi/StreamOffsetObserver.java
|
nakadi-java-client/src/main/java/nakadi/StreamOffsetObserver.java
|
package nakadi;
/**
* Can be called by the {@link StreamObserver} to indicate a batch has been processed.
*
* Typically this is used to implement a checkpointer that can store the position of the
* client in the stream to track their progress.
*/
public interface StreamOffsetObserver {
/**
* Receives a {@link StreamCursorContext} that can be used to checkpoint (or more generally,
* observe) progress in the stream.
* <p></p>
* The default observer for a subscription based {@link StreamProcessor} (one which has been
* given a subscription id via {@link StreamConfiguration#subscriptionId} is
* {@link SubscriptionOffsetObserver}. This will checkpoint back to the server each time
* it's called. This behaviour can be replaced by supplying a different checkpointer via
* {@link StreamProcessor.Builder#streamOffsetObserver}.
*
* @param streamCursorContext the batch's {@link StreamCursorContext}.
*
* todo: see if we need to declare checked exceptions here to force the observer to handle.
*/
void onNext(StreamCursorContext streamCursorContext) throws NakadiException;
}
|
package nakadi;
/**
* Can be called by the {@link StreamObserver} to indicate a batch has been processed.
*
* Typically this is used to implement a checkpointer that can store the position of the
* client in the stream to track their progress.
*/
public interface StreamOffsetObserver {
/**
* Receives a {@link StreamCursorContext} that can be used to checkpoint (or more generally,
* observe) progress in the stream.
* <p></p>
* The default observer for a subscription based {@link StreamProcessor} (one which has been
* given a subscription id via {@link StreamConfiguration#subscriptionId} is
* {@link SubscriptionOffsetObserver}. This will checkpoint back to the server each time
* it's called. This behaviour can be replaced by supplying a different checkpointer via
* {@link StreamProcessor.Builder#streamOffsetObserver}.
*
* @param streamCursorContext the batch's {@link StreamCursorContext}.
*
*/
void onNext(StreamCursorContext streamCursorContext) throws NakadiException;
}
|
Remove comment, offset observer will throw runtime exceptions
|
Remove comment, offset observer will throw runtime exceptions
|
Java
|
mit
|
dehora/nakadi-java
|
java
|
## Code Before:
package nakadi;
/**
* Can be called by the {@link StreamObserver} to indicate a batch has been processed.
*
* Typically this is used to implement a checkpointer that can store the position of the
* client in the stream to track their progress.
*/
public interface StreamOffsetObserver {
/**
* Receives a {@link StreamCursorContext} that can be used to checkpoint (or more generally,
* observe) progress in the stream.
* <p></p>
* The default observer for a subscription based {@link StreamProcessor} (one which has been
* given a subscription id via {@link StreamConfiguration#subscriptionId} is
* {@link SubscriptionOffsetObserver}. This will checkpoint back to the server each time
* it's called. This behaviour can be replaced by supplying a different checkpointer via
* {@link StreamProcessor.Builder#streamOffsetObserver}.
*
* @param streamCursorContext the batch's {@link StreamCursorContext}.
*
* todo: see if we need to declare checked exceptions here to force the observer to handle.
*/
void onNext(StreamCursorContext streamCursorContext) throws NakadiException;
}
## Instruction:
Remove comment, offset observer will throw runtime exceptions
## Code After:
package nakadi;
/**
* Can be called by the {@link StreamObserver} to indicate a batch has been processed.
*
* Typically this is used to implement a checkpointer that can store the position of the
* client in the stream to track their progress.
*/
public interface StreamOffsetObserver {
/**
* Receives a {@link StreamCursorContext} that can be used to checkpoint (or more generally,
* observe) progress in the stream.
* <p></p>
* The default observer for a subscription based {@link StreamProcessor} (one which has been
* given a subscription id via {@link StreamConfiguration#subscriptionId} is
* {@link SubscriptionOffsetObserver}. This will checkpoint back to the server each time
* it's called. This behaviour can be replaced by supplying a different checkpointer via
* {@link StreamProcessor.Builder#streamOffsetObserver}.
*
* @param streamCursorContext the batch's {@link StreamCursorContext}.
*
*/
void onNext(StreamCursorContext streamCursorContext) throws NakadiException;
}
|
...
*
* @param streamCursorContext the batch's {@link StreamCursorContext}.
*
*/
void onNext(StreamCursorContext streamCursorContext) throws NakadiException;
}
...
|
5a4e8ec1179b2ae3b37190ea45fb0d72ce4d7a90
|
canopen/sync.py
|
canopen/sync.py
|
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self):
"""Send out a SYNC message once."""
self.network.send_message(self.cob_id, [])
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self, count=None):
"""Send out a SYNC message once.
:param int count:
Counter to add in message.
"""
data = [count] if count is not None else []
self.network.send_message(self.cob_id, data)
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
Allow specifying counter in SYNC message
|
Allow specifying counter in SYNC message
Addresses #63
|
Python
|
mit
|
christiansandberg/canopen,christiansandberg/canopen
|
python
|
## Code Before:
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self):
"""Send out a SYNC message once."""
self.network.send_message(self.cob_id, [])
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
## Instruction:
Allow specifying counter in SYNC message
Addresses #63
## Code After:
class SyncProducer(object):
"""Transmits a SYNC message periodically."""
#: COB-ID of the SYNC message
cob_id = 0x80
def __init__(self, network):
self.network = network
self.period = None
self._task = None
def transmit(self, count=None):
"""Send out a SYNC message once.
:param int count:
Counter to add in message.
"""
data = [count] if count is not None else []
self.network.send_message(self.cob_id, data)
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
:param float period:
Period of SYNC message in seconds.
"""
if period is not None:
self.period = period
if not self.period:
raise ValueError("A valid transmission period has not been given")
self._task = self.network.send_periodic(self.cob_id, [], self.period)
def stop(self):
"""Stop periodic transmission of SYNC message."""
self._task.stop()
|
...
self.period = None
self._task = None
def transmit(self, count=None):
"""Send out a SYNC message once.
:param int count:
Counter to add in message.
"""
data = [count] if count is not None else []
self.network.send_message(self.cob_id, data)
def start(self, period=None):
"""Start periodic transmission of SYNC message in a background thread.
...
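A small, self-contained sketch of the new count argument; the fake network class below is invented so the example runs without CAN hardware, and a real application would pass its own canopen network object instead:

from canopen.sync import SyncProducer

class FakeNetwork:
    # Minimal stand-in exposing only what SyncProducer calls
    def send_message(self, cob_id, data):
        print("SYNC", hex(cob_id), list(data))
    def send_periodic(self, cob_id, data, period):
        print("periodic SYNC every", period, "s")
        class _Task:
            def stop(self):
                print("periodic SYNC stopped")
        return _Task()

producer = SyncProducer(FakeNetwork())
producer.transmit()    # SYNC with no data byte, as before
producer.transmit(5)   # SYNC carrying counter byte 0x05
producer.start(0.1)    # periodic SYNC every 100 ms (no counter)
producer.stop()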
|
76da7e8bcee5cb91723ebe47006b1e3c20e7cc60
|
services/httplib.py
|
services/httplib.py
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
Make HttpLibHttpService compatible with Exception (no kwarg).
|
Make HttpLibHttpService compatible with Exception (no kwarg).
|
Python
|
bsd-2-clause
|
storecast/holon
|
python
|
## Code Before:
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
## Instruction:
Make HttpLibHttpService compatible with Exception (no kwarg).
## Code After:
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
...
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
...
|
237c88f631c428a2a6afa4927a02c2f84939025c
|
Source/NSManagedObject+HYPURI.h
|
Source/NSManagedObject+HYPURI.h
|
@import CoreData;
@interface NSManagedObject (HYPURI)
- (NSString *)hyp_URI;
@end
|
@import CoreData;
@interface NSManagedObject (HYPURI)
- (NSString *)hyp_URI;
+ (NSManagedObject *)managedObjectWithURI:(NSString *)URI inContext:(NSManagedObjectContext *)context;
@end
|
Add method to retrieve object using an URI
|
Add method to retrieve object using an URI
|
C
|
mit
|
hyperoslo/NSManagedObject-HYPURI
|
c
|
## Code Before:
@import CoreData;
@interface NSManagedObject (HYPURI)
- (NSString *)hyp_URI;
@end
## Instruction:
Add method to retrieve object using an URI
## Code After:
@import CoreData;
@interface NSManagedObject (HYPURI)
- (NSString *)hyp_URI;
+ (NSManagedObject *)managedObjectWithURI:(NSString *)URI inContext:(NSManagedObjectContext *)context;
@end
|
...
- (NSString *)hyp_URI;
+ (NSManagedObject *)managedObjectWithURI:(NSString *)URI inContext:(NSManagedObjectContext *)context;
@end
...
|
1e1c8a80199eacb64783a3fa69673059aa04da90
|
boardinghouse/tests/test_template_tag.py
|
boardinghouse/tests/test_template_tag.py
|
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import *
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo'))
|
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo'))
|
Fix tests since we changed imports.
|
Fix tests since we changed imports.
|
Python
|
bsd-3-clause
|
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
|
python
|
## Code Before:
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import *
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo'))
## Instruction:
Fix tests since we changed imports.
## Code After:
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo'))
|
# ... existing code ...
from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
# ... rest of the code ...
|
57ca28bb8d019266a16ff18a87d71f12b59224b5
|
src/exercise112.c
|
src/exercise112.c
|
/*
* A solution to Exercise 1-12 in The C Programming Language (Second Edition).
*
* This file was written by Damien Dart <[email protected]>. This is free
* and unencumbered software released into the public domain. For more
* information, please refer to the accompanying "UNLICENCE" file.
*/
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
int main(void)
{
int16_t character = 0;
bool in_whitespace = false;
while ((character = getchar()) != EOF) {
if ((character == ' ') || (character == '\t')) {
if (in_whitespace == false) {
putchar('\n');
in_whitespace = true;
}
} else {
putchar(character);
in_whitespace = false;
}
}
return EXIT_SUCCESS;
}
|
/*
* A solution to Exercise 1-12 in The C Programming Language (Second Edition).
*
* This file was written by Damien Dart <[email protected]>. This is free
* and unencumbered software released into the public domain. For more
* information, please refer to the accompanying "UNLICENCE" file.
*/
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
int main(void)
{
int16_t character = 0;
bool in_whitespace = false;
while ((character = getchar()) != EOF) {
if ((character == ' ') || (character == '\t' || character == '\n')) {
if (in_whitespace == false) {
putchar('\n');
in_whitespace = true;
}
} else {
putchar(character);
in_whitespace = false;
}
}
return EXIT_SUCCESS;
}
|
Fix solution to Exercise 1-12.
|
Fix solution to Exercise 1-12.
Fix solution to Exercise 1-12 so that newline characters are processed correctly.
|
C
|
unlicense
|
damiendart/knr-solutions,damiendart/knr-solutions,damiendart/knr-solutions
|
c
|
## Code Before:
/*
* A solution to Exercise 1-12 in The C Programming Language (Second Edition).
*
* This file was written by Damien Dart <[email protected]>. This is free
* and unencumbered software released into the public domain. For more
* information, please refer to the accompanying "UNLICENCE" file.
*/
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
int main(void)
{
int16_t character = 0;
bool in_whitespace = false;
while ((character = getchar()) != EOF) {
if ((character == ' ') || (character == '\t')) {
if (in_whitespace == false) {
putchar('\n');
in_whitespace = true;
}
} else {
putchar(character);
in_whitespace = false;
}
}
return EXIT_SUCCESS;
}
## Instruction:
Fix solution to Exercise 1-12.
Fix solution to Exercise 1-12 so that newline characters are processed correctly.
## Code After:
/*
* A solution to Exercise 1-12 in The C Programming Language (Second Edition).
*
* This file was written by Damien Dart <[email protected]>. This is free
* and unencumbered software released into the public domain. For more
* information, please refer to the accompanying "UNLICENCE" file.
*/
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
int main(void)
{
int16_t character = 0;
bool in_whitespace = false;
while ((character = getchar()) != EOF) {
if ((character == ' ') || (character == '\t' || character == '\n')) {
if (in_whitespace == false) {
putchar('\n');
in_whitespace = true;
}
} else {
putchar(character);
in_whitespace = false;
}
}
return EXIT_SUCCESS;
}
|
...
int16_t character = 0;
bool in_whitespace = false;
while ((character = getchar()) != EOF) {
if ((character == ' ') || (character == '\t' || character == '\n')) {
if (in_whitespace == false) {
putchar('\n');
in_whitespace = true;
...
|
cea439f4f5eeef40f0d04c7e8096733e7f55cf3b
|
src/main/java/ch/rasc/sec/controller/HelloController.java
|
src/main/java/ch/rasc/sec/controller/HelloController.java
|
package ch.rasc.sec.controller;
import org.springframework.security.web.bind.annotation.AuthenticationPrincipal;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import ch.rasc.sec.security.JpaUserDetails;
@Controller
public class HelloController {
@RequestMapping(value = "/", method = RequestMethod.GET)
@ResponseBody
public String sayHello(@AuthenticationPrincipal JpaUserDetails user) {
return "Hello " + user.getUsername();
}
}
|
package ch.rasc.sec.controller;
import java.util.Date;
import java.util.Map;
import org.springframework.security.web.bind.annotation.AuthenticationPrincipal;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import ch.rasc.sec.security.JpaUserDetails;
@Controller
public class HelloController {
@RequestMapping(value = "/sayHello", method = RequestMethod.GET)
@ResponseBody
public String sayHello(@AuthenticationPrincipal JpaUserDetails user) {
return "Hello " + user.getUsername();
}
@RequestMapping("/")
public String home(Map<String, Object> model,
@AuthenticationPrincipal JpaUserDetails user) {
model.put("message", "Hello World");
model.put("title", "Hello Home");
model.put("date", new Date());
model.put("user", user.getUsername());
return "home";
}
}
|
Add home request mapping method
|
Add home request mapping method
|
Java
|
mit
|
ralscha/springsecuritytotp,ralscha/springsecuritytotp,ralscha/springsecuritytotp
|
java
|
## Code Before:
package ch.rasc.sec.controller;
import org.springframework.security.web.bind.annotation.AuthenticationPrincipal;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import ch.rasc.sec.security.JpaUserDetails;
@Controller
public class HelloController {
@RequestMapping(value = "/", method = RequestMethod.GET)
@ResponseBody
public String sayHello(@AuthenticationPrincipal JpaUserDetails user) {
return "Hello " + user.getUsername();
}
}
## Instruction:
Add home request mapping method
## Code After:
package ch.rasc.sec.controller;
import java.util.Date;
import java.util.Map;
import org.springframework.security.web.bind.annotation.AuthenticationPrincipal;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import ch.rasc.sec.security.JpaUserDetails;
@Controller
public class HelloController {
@RequestMapping(value = "/sayHello", method = RequestMethod.GET)
@ResponseBody
public String sayHello(@AuthenticationPrincipal JpaUserDetails user) {
return "Hello " + user.getUsername();
}
@RequestMapping("/")
public String home(Map<String, Object> model,
@AuthenticationPrincipal JpaUserDetails user) {
model.put("message", "Hello World");
model.put("title", "Hello Home");
model.put("date", new Date());
model.put("user", user.getUsername());
return "home";
}
}
|
...
package ch.rasc.sec.controller;
import java.util.Date;
import java.util.Map;
import org.springframework.security.web.bind.annotation.AuthenticationPrincipal;
import org.springframework.stereotype.Controller;
...
@Controller
public class HelloController {
@RequestMapping(value = "/sayHello", method = RequestMethod.GET)
@ResponseBody
public String sayHello(@AuthenticationPrincipal JpaUserDetails user) {
return "Hello " + user.getUsername();
}
@RequestMapping("/")
public String home(Map<String, Object> model,
@AuthenticationPrincipal JpaUserDetails user) {
model.put("message", "Hello World");
model.put("title", "Hello Home");
model.put("date", new Date());
model.put("user", user.getUsername());
return "home";
}
}
...
|
c1464c4787ee7dcb3fc4c0fc0622cb7f336a4fe6
|
config.h
|
config.h
|
/* Uncomment to compile with tcpd/libwrap support. */
//#define WITH_WRAP
/* Compile with database upgrading support? If disabled, mosquitto won't
* automatically upgrade old database versions. */
//#define WITH_DB_UPGRADE
/* Compile with memory tracking support? If disabled, mosquitto won't track
* heap memory usage nor export '$SYS/broker/heap/current size', but will use
* slightly less memory and CPU time. */
#define WITH_MEMORY_TRACKING
|
/* Uncomment to compile with tcpd/libwrap support. */
//#define WITH_WRAP
/* Compile with database upgrading support? If disabled, mosquitto won't
* automatically upgrade old database versions. */
//#define WITH_DB_UPGRADE
/* Compile with memory tracking support? If disabled, mosquitto won't track
* heap memory usage nor export '$SYS/broker/heap/current size', but will use
* slightly less memory and CPU time. */
#define WITH_MEMORY_TRACKING
/* Compile with the ability to upgrade from old style sqlite persistent
* databases to the new mosquitto format. This means a dependency on sqlite. It
* isn't needed for new installations. */
#define WITH_SQLITE_UPGRADE
|
Add compile option for sqlite db upgrades.
|
Add compile option for sqlite db upgrades.
|
C
|
bsd-3-clause
|
tempbottle/mosquitto,tempbottle/mosquitto,tempbottle/mosquitto,tempbottle/mosquitto,tempbottle/mosquitto
|
c
|
## Code Before:
/* Uncomment to compile with tcpd/libwrap support. */
//#define WITH_WRAP
/* Compile with database upgrading support? If disabled, mosquitto won't
* automatically upgrade old database versions. */
//#define WITH_DB_UPGRADE
/* Compile with memory tracking support? If disabled, mosquitto won't track
* heap memory usage nor export '$SYS/broker/heap/current size', but will use
* slightly less memory and CPU time. */
#define WITH_MEMORY_TRACKING
## Instruction:
Add compile option for sqlite db upgrades.
## Code After:
/* Uncomment to compile with tcpd/libwrap support. */
//#define WITH_WRAP
/* Compile with database upgrading support? If disabled, mosquitto won't
* automatically upgrade old database versions. */
//#define WITH_DB_UPGRADE
/* Compile with memory tracking support? If disabled, mosquitto won't track
* heap memory usage nor export '$SYS/broker/heap/current size', but will use
* slightly less memory and CPU time. */
#define WITH_MEMORY_TRACKING
/* Compile with the ability to upgrade from old style sqlite persistent
* databases to the new mosquitto format. This means a dependency on sqlite. It
* isn't needed for new installations. */
#define WITH_SQLITE_UPGRADE
|
...
* heap memory usage nor export '$SYS/broker/heap/current size', but will use
* slightly less memory and CPU time. */
#define WITH_MEMORY_TRACKING
/* Compile with the ability to upgrade from old style sqlite persistent
* databases to the new mosquitto format. This means a dependency on sqlite. It
* isn't needed for new installations. */
#define WITH_SQLITE_UPGRADE
...
|
2766e8797515497e5569b31696416db68641c9b4
|
base/models.py
|
base/models.py
|
import os
from django.conf import settings
class MediaRemovalMixin(object):
"""
Removes all files associated with the model, as returned by the
get_media_files() method.
"""
# Models that use this mixin need to override this method
def get_media_files(self):
return
def delete(self):
for media_file in self.get_media_files():
path = settings.MEDIA_ROOT + media_file
if os.path.exists(path):
os.remove(path)
return super(MediaRemovalMixin, self).delete()
|
import os
from django.conf import settings
class MediaRemovalMixin(object):
"""
Removes all files associated with the model, as returned by the
get_media_files() method.
"""
# Models that use this mixin need to override this method
def get_media_files(self):
return
def delete(self, *args, **kwargs):
for media_file in self.get_media_files():
path = settings.MEDIA_ROOT + media_file
if os.path.exists(path):
os.remove(path)
return super(MediaRemovalMixin, self).delete(*args, **kwargs)
def save(self, *args, **kwargs):
if self.pk:
# Primary key exists, object is being edited
old_object = self.__class__.objects.get(pk=self.pk)
path_pairs = zip(old_object.get_media_files(),
self.get_media_files())
# Move each associated file to its new location
for (old_path, new_path) in path_pairs:
full_old_path = settings.MEDIA_ROOT + old_path
full_new_path = settings.MEDIA_ROOT + new_path
if old_path != new_path and os.path.exists(full_old_path):
os.rename(full_old_path, full_new_path)
return super(MediaRemovalMixin, self).save(*args, **kwargs)
|
Extend MediaRemovalMixin to move media files on updates
|
base: Extend MediaRemovalMixin to move media files on updates
|
Python
|
mit
|
matus-stehlik/roots,rtrembecky/roots,matus-stehlik/glowing-batman,tbabej/roots,rtrembecky/roots,rtrembecky/roots,matus-stehlik/roots,matus-stehlik/roots,tbabej/roots,matus-stehlik/glowing-batman,tbabej/roots
|
python
|
## Code Before:
import os
from django.conf import settings
class MediaRemovalMixin(object):
"""
Removes all files associated with the model, as returned by the
get_media_files() method.
"""
# Models that use this mixin need to override this method
def get_media_files(self):
return
def delete(self):
for media_file in self.get_media_files():
path = settings.MEDIA_ROOT + media_file
if os.path.exists(path):
os.remove(path)
return super(MediaRemovalMixin, self).delete()
## Instruction:
base: Extend MediaRemovalMixin to move media files on updates
## Code After:
import os
from django.conf import settings
class MediaRemovalMixin(object):
"""
Removes all files associated with the model, as returned by the
get_media_files() method.
"""
# Models that use this mixin need to override this method
def get_media_files(self):
return
def delete(self, *args, **kwargs):
for media_file in self.get_media_files():
path = settings.MEDIA_ROOT + media_file
if os.path.exists(path):
os.remove(path)
return super(MediaRemovalMixin, self).delete(*args, **kwargs)
def save(self, *args, **kwargs):
if self.pk:
# Primary key exists, object is being edited
old_object = self.__class__.objects.get(pk=self.pk)
path_pairs = zip(old_object.get_media_files(),
self.get_media_files())
# Move each associated file to its new location
for (old_path, new_path) in path_pairs:
full_old_path = settings.MEDIA_ROOT + old_path
full_new_path = settings.MEDIA_ROOT + new_path
if old_path != new_path and os.path.exists(full_old_path):
os.rename(full_old_path, full_new_path)
return super(MediaRemovalMixin, self).save(*args, **kwargs)
|
// ... existing code ...
def get_media_files(self):
return
def delete(self, *args, **kwargs):
for media_file in self.get_media_files():
path = settings.MEDIA_ROOT + media_file
// ... modified code ...
if os.path.exists(path):
os.remove(path)
return super(MediaRemovalMixin, self).delete(*args, **kwargs)
def save(self, *args, **kwargs):
if self.pk:
# Primary key exists, object is being edited
old_object = self.__class__.objects.get(pk=self.pk)
path_pairs = zip(old_object.get_media_files(),
self.get_media_files())
# Move each associated file to its new location
for (old_path, new_path) in path_pairs:
full_old_path = settings.MEDIA_ROOT + old_path
full_new_path = settings.MEDIA_ROOT + new_path
if old_path != new_path and os.path.exists(full_old_path):
os.rename(full_old_path, full_new_path)
return super(MediaRemovalMixin, self).save(*args, **kwargs)
// ... rest of the code ...
|
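A framework-free sketch of the rename-on-update idea the MediaRemovalMixin record above applies, using only the standard library; Profile, avatar_name and rename_to are hypothetical stand-ins for a Django model, its get_media_files() output and save(), and are not part of the original commit.

import os
import tempfile

class Profile:
    def __init__(self, root, avatar_name):
        self.root = root
        self.avatar_name = avatar_name

    def get_media_files(self):
        return [self.avatar_name]

    def rename_to(self, new_name):
        # Mirrors the mixin's save(): move each file from its old path to its new path.
        old_path = os.path.join(self.root, self.avatar_name)
        new_path = os.path.join(self.root, new_name)
        if old_path != new_path and os.path.exists(old_path):
            os.rename(old_path, new_path)
        self.avatar_name = new_name

root = tempfile.mkdtemp()
open(os.path.join(root, "old.png"), "w").close()
profile = Profile(root, "old.png")
profile.rename_to("new.png")
print(os.listdir(root))  # ['new.png']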
e9f68b81321a111946b0bef4ee8ba3b9bf20829f
|
src/main/java/absyn/DecVar.java
|
src/main/java/absyn/DecVar.java
|
package absyn;
import env.Env;
import javaslang.collection.Tree;
import parse.Loc;
import semantic.SemanticHelper;
import types.Type;
public class DecVar extends Dec {
public final String name;
public final String typeName;
public final Exp init;
public DecVar(Loc loc, String name, String typeName, Exp init) {
super(loc);
this.name = name;
this.typeName = typeName;
this.init = init;
}
@Override
public Tree.Node<String> toTree() {
return Tree.of("DecVar",
Tree.of(name),
Tree.of(typeName),
init.toTree());
}
@Override
public void semantic(Env env) {
Type t_init = init.semantic(env);
Type t_var = t_init;
if (typeName != null) {
Type t_typeName = env.tenv.get(typeName);
if (t_typeName == null)
throw SemanticHelper.undefined(loc, "type", typeName);
if (!t_init.is(t_typeName))
throw SemanticHelper.typeMismatch(init.loc, t_init, t_typeName);
t_var = t_typeName;
}
env.venv.put(name, t_var);
}
}
|
package absyn;
import env.Env;
import javaslang.collection.List;
import javaslang.collection.Tree;
import parse.Loc;
import semantic.SemanticHelper;
import types.Type;
public class DecVar extends Dec {
public final String name;
public final String typeName;
public final Exp init;
public DecVar(Loc loc, String name, String typeName, Exp init) {
super(loc);
this.name = name;
this.typeName = typeName;
this.init = init;
}
@Override
public Tree.Node<String> toTree() {
List<Tree.Node<String>> children = List.of(Tree.of(name));
if (typeName != null)
children = children.append(Tree.of(typeName));
children = children.append(init.toTree());
return Tree.of("DecVar", children);
}
@Override
public void semantic(Env env) {
Type t_init = init.semantic(env);
Type t_var = t_init;
if (typeName != null) {
Type t_typeName = env.tenv.get(typeName);
if (t_typeName == null)
throw SemanticHelper.undefined(loc, "type", typeName);
if (!t_init.is(t_typeName))
throw SemanticHelper.typeMismatch(init.loc, t_init, t_typeName);
t_var = t_typeName;
}
env.venv.put(name, t_var);
}
}
|
Fix toTree method for variable declarations
|
Fix toTree method for variable declarations
The type name of the variable may be missing.
|
Java
|
mit
|
romildo/eplan,romildo/eplan
|
java
|
## Code Before:
package absyn;
import env.Env;
import javaslang.collection.Tree;
import parse.Loc;
import semantic.SemanticHelper;
import types.Type;
public class DecVar extends Dec {
public final String name;
public final String typeName;
public final Exp init;
public DecVar(Loc loc, String name, String typeName, Exp init) {
super(loc);
this.name = name;
this.typeName = typeName;
this.init = init;
}
@Override
public Tree.Node<String> toTree() {
return Tree.of("DecVar",
Tree.of(name),
Tree.of(typeName),
init.toTree());
}
@Override
public void semantic(Env env) {
Type t_init = init.semantic(env);
Type t_var = t_init;
if (typeName != null) {
Type t_typeName = env.tenv.get(typeName);
if (t_typeName == null)
throw SemanticHelper.undefined(loc, "type", typeName);
if (!t_init.is(t_typeName))
throw SemanticHelper.typeMismatch(init.loc, t_init, t_typeName);
t_var = t_typeName;
}
env.venv.put(name, t_var);
}
}
## Instruction:
Fix toTree method for variable declarations
The type name of the variable may be missing.
## Code After:
package absyn;
import env.Env;
import javaslang.collection.List;
import javaslang.collection.Tree;
import parse.Loc;
import semantic.SemanticHelper;
import types.Type;
public class DecVar extends Dec {
public final String name;
public final String typeName;
public final Exp init;
public DecVar(Loc loc, String name, String typeName, Exp init) {
super(loc);
this.name = name;
this.typeName = typeName;
this.init = init;
}
@Override
public Tree.Node<String> toTree() {
List<Tree.Node<String>> children = List.of(Tree.of(name));
if (typeName != null)
children = children.append(Tree.of(typeName));
children = children.append(init.toTree());
return Tree.of("DecVar", children);
}
@Override
public void semantic(Env env) {
Type t_init = init.semantic(env);
Type t_var = t_init;
if (typeName != null) {
Type t_typeName = env.tenv.get(typeName);
if (t_typeName == null)
throw SemanticHelper.undefined(loc, "type", typeName);
if (!t_init.is(t_typeName))
throw SemanticHelper.typeMismatch(init.loc, t_init, t_typeName);
t_var = t_typeName;
}
env.venv.put(name, t_var);
}
}
|
...
package absyn;
import env.Env;
import javaslang.collection.List;
import javaslang.collection.Tree;
import parse.Loc;
import semantic.SemanticHelper;
...
@Override
public Tree.Node<String> toTree() {
List<Tree.Node<String>> children = List.of(Tree.of(name));
if (typeName != null)
children = children.append(Tree.of(typeName));
children = children.append(init.toTree());
return Tree.of("DecVar", children);
}
@Override
...
|
a867d240eba806b6aecabfa170b7cac2dfe82c2c
|
src/main/java/com/mpalourdio/hello/Application.java
|
src/main/java/com/mpalourdio/hello/Application.java
|
package com.mpalourdio.hello;
import app.config.WebSecurityConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import java.util.Arrays;
@SpringBootApplication
@PropertySource(value = {
"file:properties/global.properties",
"file:properties/local.properties"
}, ignoreResourceNotFound = true)
@Import({WebSecurityConfig.class})
public class Application {
public static void main(final String[] args) {
final ApplicationContext ctx = SpringApplication.run(Application.class, args);
final String[] beanNames = ctx.getBeanDefinitionNames();
Arrays.sort(beanNames);
Arrays.stream(beanNames).forEach(System.out::println);
}
}
|
package com.mpalourdio.hello;
import app.config.WebSecurityConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import java.util.Arrays;
@SpringBootApplication
@PropertySource(value = {
"file:properties/global.properties",
"file:properties/local.properties"
}, ignoreResourceNotFound = true)
@Import({WebSecurityConfig.class})
public class Application {
public static void main(final String[] args) {
final ApplicationContext ctx = SpringApplication.run(Application.class, args);
final String[] beanNames = ctx.getBeanDefinitionNames();
Arrays.stream(beanNames)
.sorted()
.forEach(System.out::println);
}
}
|
Sort beans names in stream
|
Sort beans names in stream
|
Java
|
mit
|
mpalourdio/SpringBootTemplate,mpalourdio/SpringBootTemplate
|
java
|
## Code Before:
package com.mpalourdio.hello;
import app.config.WebSecurityConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import java.util.Arrays;
@SpringBootApplication
@PropertySource(value = {
"file:properties/global.properties",
"file:properties/local.properties"
}, ignoreResourceNotFound = true)
@Import({WebSecurityConfig.class})
public class Application {
public static void main(final String[] args) {
final ApplicationContext ctx = SpringApplication.run(Application.class, args);
final String[] beanNames = ctx.getBeanDefinitionNames();
Arrays.sort(beanNames);
Arrays.stream(beanNames).forEach(System.out::println);
}
}
## Instruction:
Sort beans names in stream
## Code After:
package com.mpalourdio.hello;
import app.config.WebSecurityConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import java.util.Arrays;
@SpringBootApplication
@PropertySource(value = {
"file:properties/global.properties",
"file:properties/local.properties"
}, ignoreResourceNotFound = true)
@Import({WebSecurityConfig.class})
public class Application {
public static void main(final String[] args) {
final ApplicationContext ctx = SpringApplication.run(Application.class, args);
final String[] beanNames = ctx.getBeanDefinitionNames();
Arrays.stream(beanNames)
.sorted()
.forEach(System.out::println);
}
}
|
// ... existing code ...
public static void main(final String[] args) {
final ApplicationContext ctx = SpringApplication.run(Application.class, args);
final String[] beanNames = ctx.getBeanDefinitionNames();
Arrays.stream(beanNames)
.sorted()
.forEach(System.out::println);
}
}
// ... rest of the code ...
|
bed66179633a86751a938c13b98f5b56c3c1cfc7
|
fabfile.py
|
fabfile.py
|
from fabric.api import local
vim_bundles = [
{
'git': 'git://github.com/fatih/vim-go.git',
'path': '~/.vim/bundle/vim-go'
}
]
def apt_get():
local('sudo apt-get update')
local('sudo apt-get upgrade')
# neovim instead of vim?
local('sudo apt-get install zsh vim wget curl kitty suckless-tools \
xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \
keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk')
def oh_my_zsh():
local('curl -L http://install.ohmyz.sh | sh')
local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc')
local('chsh -s $(which shell)')
def install_vim():
local('mkdir -p ~/.vim/autoload ~/.vim/bundle')
local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim')
for bundle in vim_bundles:
local('git clone ' + bundle['git'] + ' ' + bundle['path'])
local('cd ~')
def update_vim():
for bundle in vim_bundles:
local('cd ' + bundle['path'] + ' && git pull')
local('cd ~')
|
from fabric.api import local
vim_bundles = [
{
'git': 'git://github.com/fatih/vim-go.git',
'path': '~/.vim/bundle/vim-go'
}
]
def apt_get():
local('sudo apt-get update')
local('sudo apt-get upgrade')
# neovim instead of vim?
local('sudo apt-get install zsh vim wget curl kitty suckless-tools \
xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \
keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \
graphviz')
def oh_my_zsh():
local('curl -L http://install.ohmyz.sh | sh')
local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc')
local('chsh -s $(which shell)')
def install_vim():
local('mkdir -p ~/.vim/autoload ~/.vim/bundle')
local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim')
for bundle in vim_bundles:
local('git clone ' + bundle['git'] + ' ' + bundle['path'])
local('cd ~')
def update_vim():
for bundle in vim_bundles:
local('cd ' + bundle['path'] + ' && git pull')
local('cd ~')
|
Add graphviz for converting dot to pdf
|
Add graphviz for converting dot to pdf
|
Python
|
unlicense
|
spanners/dotfiles
|
python
|
## Code Before:
from fabric.api import local
vim_bundles = [
{
'git': 'git://github.com/fatih/vim-go.git',
'path': '~/.vim/bundle/vim-go'
}
]
def apt_get():
local('sudo apt-get update')
local('sudo apt-get upgrade')
# neovim instead of vim?
local('sudo apt-get install zsh vim wget curl kitty suckless-tools \
xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \
keepassx xdotool xclip rtorrent diffpdf xfce4 redshift-gtk')
def oh_my_zsh():
local('curl -L http://install.ohmyz.sh | sh')
local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc')
local('chsh -s $(which shell)')
def install_vim():
local('mkdir -p ~/.vim/autoload ~/.vim/bundle')
local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim')
for bundle in vim_bundles:
local('git clone ' + bundle['git'] + ' ' + bundle['path'])
local('cd ~')
def update_vim():
for bundle in vim_bundles:
local('cd ' + bundle['path'] + ' && git pull')
local('cd ~')
## Instruction:
Add graphviz for converting dot to pdf
## Code After:
from fabric.api import local
vim_bundles = [
{
'git': 'git://github.com/fatih/vim-go.git',
'path': '~/.vim/bundle/vim-go'
}
]
def apt_get():
local('sudo apt-get update')
local('sudo apt-get upgrade')
# neovim instead of vim?
local('sudo apt-get install zsh vim wget curl kitty suckless-tools \
xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \
keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \
graphviz')
def oh_my_zsh():
local('curl -L http://install.ohmyz.sh | sh')
local('cp ~/.zshrc.pre-oh-my-zsh ~/.zshrc')
local('chsh -s $(which shell)')
def install_vim():
local('mkdir -p ~/.vim/autoload ~/.vim/bundle')
local('curl -LSso ~/.vim/autoload/pathogen.vim https://tpo.pe/pathogen.vim')
for bundle in vim_bundles:
local('git clone ' + bundle['git'] + ' ' + bundle['path'])
local('cd ~')
def update_vim():
for bundle in vim_bundles:
local('cd ' + bundle['path'] + ' && git pull')
local('cd ~')
|
...
# neovim instead of vim?
local('sudo apt-get install zsh vim wget curl kitty suckless-tools \
xautolock feh tmux neomutt mpd ncmpcpp vlc unp htop exa \
keepassxc xdotool xclip rtorrent diffpdf xfce4 redshift-gtk \
graphviz')
def oh_my_zsh():
local('curl -L http://install.ohmyz.sh | sh')
...
|
9eafc01ef8260a313f2e214924cfd5bda706c1c0
|
cactusbot/handler.py
|
cactusbot/handler.py
|
"""Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
response = ""
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
print("Uh oh!")
print(e)
else:
if response is StopIteration:
break
yield response
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
|
"""Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.logger = logging.getLogger(__name__)
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
response = ""
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
self.logger.warning(e)
else:
if response is StopIteration:
break
yield response
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
|
Add exception logging to Handlers
|
Add exception logging to Handlers
|
Python
|
mit
|
CactusDev/CactusBot
|
python
|
## Code Before:
"""Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
response = ""
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
print("Uh oh!")
print(e)
else:
if response is StopIteration:
break
yield response
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
## Instruction:
Add exception logging to Handlers
## Code After:
"""Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.logger = logging.getLogger(__name__)
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
response = ""
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
self.logger.warning(e)
else:
if response is StopIteration:
break
yield response
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
|
// ... existing code ...
"""Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.logger = logging.getLogger(__name__)
self.handlers = handlers
def handle(self, event, packet):
// ... modified code ...
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
self.logger.warning(e)
else:
if response is StopIteration:
break
...
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
// ... rest of the code ...
|
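A standalone sketch of the logging pattern the Handlers record above switches to (a module-level logger instead of print); safe_call and the basicConfig call are illustrative additions, not part of the original code.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

def safe_call(func, *args):
    # Same shape as Handlers.handle(): swallow the exception but record it via the logger.
    try:
        return func(*args)
    except Exception as e:
        logger.warning(e)
        return None

print(safe_call(int, "not a number"))  # the ValueError is logged, None is returned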
e538f2862a875afc58071a9fc6419e4290f8b00d
|
rouver/types.py
|
rouver/types.py
|
from types import TracebackType
from typing import \
Callable, Tuple, Dict, Any, Iterable, Sequence, Mapping, Optional, Type
from werkzeug.wrappers import Request
# (name, value)
Header = Tuple[str, str]
WSGIEnvironment = Dict[str, Any]
_exc_info = Tuple[Optional[Type[BaseException]],
Optional[BaseException],
Optional[TracebackType]]
# (body) -> None
StartResponseReturnType = Callable[[bytes], None]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse = Callable[..., StartResponseReturnType]
WSGIResponse = Iterable[bytes]
WSGIApplication = Callable[[WSGIEnvironment, StartResponse], WSGIResponse]
# (method, path, callback)
RouteDescription = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler = Callable[[Request, Sequence[Any], str], Any]
BadArgumentsDict = Mapping[str, str]
|
from typing import \
Callable, Tuple, Dict, Any, Iterable, Sequence, Mapping
from werkzeug.wrappers import Request
# (name, value)
Header = Tuple[str, str]
WSGIEnvironment = Dict[str, Any]
# (body) -> None
StartResponseReturnType = Callable[[bytes], None]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse = Callable[..., StartResponseReturnType]
WSGIResponse = Iterable[bytes]
WSGIApplication = Callable[[WSGIEnvironment, StartResponse], WSGIResponse]
# (method, path, callback)
RouteDescription = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler = Callable[[Request, Sequence[Any], str], Any]
BadArgumentsDict = Mapping[str, str]
|
Remove obsolete aliases and imports
|
Remove obsolete aliases and imports
|
Python
|
mit
|
srittau/rouver
|
python
|
## Code Before:
from types import TracebackType
from typing import \
Callable, Tuple, Dict, Any, Iterable, Sequence, Mapping, Optional, Type
from werkzeug.wrappers import Request
# (name, value)
Header = Tuple[str, str]
WSGIEnvironment = Dict[str, Any]
_exc_info = Tuple[Optional[Type[BaseException]],
Optional[BaseException],
Optional[TracebackType]]
# (body) -> None
StartResponseReturnType = Callable[[bytes], None]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse = Callable[..., StartResponseReturnType]
WSGIResponse = Iterable[bytes]
WSGIApplication = Callable[[WSGIEnvironment, StartResponse], WSGIResponse]
# (method, path, callback)
RouteDescription = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler = Callable[[Request, Sequence[Any], str], Any]
BadArgumentsDict = Mapping[str, str]
## Instruction:
Remove obsolete aliases and imports
## Code After:
from typing import \
Callable, Tuple, Dict, Any, Iterable, Sequence, Mapping
from werkzeug.wrappers import Request
# (name, value)
Header = Tuple[str, str]
WSGIEnvironment = Dict[str, Any]
# (body) -> None
StartResponseReturnType = Callable[[bytes], None]
# (status: str, headers: List[Headers], exc_info) -> response
StartResponse = Callable[..., StartResponseReturnType]
WSGIResponse = Iterable[bytes]
WSGIApplication = Callable[[WSGIEnvironment, StartResponse], WSGIResponse]
# (method, path, callback)
RouteDescription = Tuple[str, str, WSGIApplication]
# (request, previous_args, path_part) -> result
RouteTemplateHandler = Callable[[Request, Sequence[Any], str], Any]
BadArgumentsDict = Mapping[str, str]
|
...
from typing import \
Callable, Tuple, Dict, Any, Iterable, Sequence, Mapping
from werkzeug.wrappers import Request
...
Header = Tuple[str, str]
WSGIEnvironment = Dict[str, Any]
# (body) -> None
StartResponseReturnType = Callable[[bytes], None]
...
|
89971ece16ee1c062a8a54fa5cd83c473628c2ba
|
pyanyapi/helpers.py
|
pyanyapi/helpers.py
|
class cached_property(object):
"""
Copied from Django.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type=None):
if instance is None:
return self
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
def attach_attribute(target, name, attr):
attr.__name__ = name
setattr(target, name, attr)
def attach_cached_property(target, name, prop):
method = cached_property(prop)
attach_attribute(target, name, method)
|
class cached_property(object):
"""
Copied from Django.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type=None):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
def attach_attribute(target, name, attr):
attr.__name__ = name
setattr(target, name, attr)
def attach_cached_property(target, name, prop):
method = cached_property(prop)
attach_attribute(target, name, method)
|
Remove unused line from cached_property
|
Remove unused line from cached_property
|
Python
|
mit
|
gorlemik/pyanyapi,Stranger6667/pyanyapi
|
python
|
## Code Before:
class cached_property(object):
"""
Copied from Django.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type=None):
if instance is None:
return self
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
def attach_attribute(target, name, attr):
attr.__name__ = name
setattr(target, name, attr)
def attach_cached_property(target, name, prop):
method = cached_property(prop)
attach_attribute(target, name, method)
## Instruction:
Remove unused line from cached_property
## Code After:
class cached_property(object):
"""
Copied from Django.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type=None):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
def attach_attribute(target, name, attr):
attr.__name__ = name
setattr(target, name, attr)
def attach_cached_property(target, name, prop):
method = cached_property(prop)
attach_attribute(target, name, method)
|
# ... existing code ...
self.func = func
def __get__(self, instance, type=None):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
# ... rest of the code ...
|
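A small sketch of what the trimmed cached_property descriptor from the record above actually does: the first access computes the value and writes it into the instance __dict__, so later lookups bypass the descriptor entirely. As an observation rather than part of the commit, dropping the instance-is-None guard means reading the attribute on the class itself would now raise AttributeError, which is harmless when the property is only used for instance access, as in attach_cached_property. The Report class below is an illustrative stand-in.

class cached_property(object):
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, type=None):
        # Compute once, then cache on the instance under the wrapped function's name.
        res = instance.__dict__[self.func.__name__] = self.func(instance)
        return res

class Report:
    calls = 0

    @cached_property
    def total(self):
        Report.calls += 1
        return 40 + 2

r = Report()
print(r.total, r.total, Report.calls)  # 42 42 1 -- the second access hits the cached value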
7e51d073952d10d3994fb93458e60c03b6746099
|
app/services/g6importService.py
|
app/services/g6importService.py
|
import json
import jsonschema
from flask import current_app
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
G6_SCS_SCHEMA = json.load(json_file1)
with open("schemata/g6-saas-schema.json") as json_file2:
G6_SAAS_SCHEMA = json.load(json_file2)
with open("schemata/g6-iaas-schema.json") as json_file3:
G6_IAAS_SCHEMA = json.load(json_file3)
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
#current_app.logger.info('Validating JSON:' + str(submitted_json))
try:
validate(submitted_json, G6_SCS_SCHEMA)
return 'G6-SCS'
except jsonschema.ValidationError as e1:
try:
validate(submitted_json, G6_SAAS_SCHEMA)
return 'G6-SaaS'
except jsonschema.ValidationError as e2:
try:
validate(submitted_json, G6_IAAS_SCHEMA)
return 'G6-IaaS'
except jsonschema.ValidationError as e3:
try:
validate(submitted_json, G6_PAAS_SCHEMA)
return 'G6-PaaS'
except jsonschema.ValidationError as e4:
print e4.message
print 'Failed validation'
return False
else:
return True
|
import json
import jsonschema
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
G6_SCS_SCHEMA = json.load(json_file1)
with open("schemata/g6-saas-schema.json") as json_file2:
G6_SAAS_SCHEMA = json.load(json_file2)
with open("schemata/g6-iaas-schema.json") as json_file3:
G6_IAAS_SCHEMA = json.load(json_file3)
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
if validates_against_schema(G6_SCS_SCHEMA,submitted_json):
return 'G6-SCS'
elif validates_against_schema(G6_SAAS_SCHEMA,submitted_json):
return 'G6-SaaS'
elif validates_against_schema(G6_PAAS_SCHEMA,submitted_json):
return 'G6-PaaS'
elif validates_against_schema(G6_IAAS_SCHEMA,submitted_json):
return 'G6-IaaS'
else:
print 'Failed validation'
return False
def validates_against_schema(schema, submitted_json):
try:
validate(submitted_json, schema)
except jsonschema.ValidationError:
return False
else:
return True
|
Improve code by avoiding flow through exception handling
|
Improve code by avoiding flow through exception handling
|
Python
|
mit
|
RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api
|
python
|
## Code Before:
import json
import jsonschema
from flask import current_app
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
G6_SCS_SCHEMA = json.load(json_file1)
with open("schemata/g6-saas-schema.json") as json_file2:
G6_SAAS_SCHEMA = json.load(json_file2)
with open("schemata/g6-iaas-schema.json") as json_file3:
G6_IAAS_SCHEMA = json.load(json_file3)
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
#current_app.logger.info('Validating JSON:' + str(submitted_json))
try:
validate(submitted_json, G6_SCS_SCHEMA)
return 'G6-SCS'
except jsonschema.ValidationError as e1:
try:
validate(submitted_json, G6_SAAS_SCHEMA)
return 'G6-SaaS'
except jsonschema.ValidationError as e2:
try:
validate(submitted_json, G6_IAAS_SCHEMA)
return 'G6-IaaS'
except jsonschema.ValidationError as e3:
try:
validate(submitted_json, G6_PAAS_SCHEMA)
return 'G6-PaaS'
except jsonschema.ValidationError as e4:
print e4.message
print 'Failed validation'
return False
else:
return True
## Instruction:
Improve code by avoiding flow through exception handling
## Code After:
import json
import jsonschema
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
G6_SCS_SCHEMA = json.load(json_file1)
with open("schemata/g6-saas-schema.json") as json_file2:
G6_SAAS_SCHEMA = json.load(json_file2)
with open("schemata/g6-iaas-schema.json") as json_file3:
G6_IAAS_SCHEMA = json.load(json_file3)
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
if validates_against_schema(G6_SCS_SCHEMA,submitted_json):
return 'G6-SCS'
elif validates_against_schema(G6_SAAS_SCHEMA,submitted_json):
return 'G6-SaaS'
elif validates_against_schema(G6_PAAS_SCHEMA,submitted_json):
return 'G6-PaaS'
elif validates_against_schema(G6_IAAS_SCHEMA,submitted_json):
return 'G6-IaaS'
else:
print 'Failed validation'
return False
def validates_against_schema(schema, submitted_json):
try:
validate(submitted_json, schema)
except jsonschema.ValidationError:
return False
else:
return True
|
// ... existing code ...
import json
import jsonschema
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
// ... modified code ...
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
if validates_against_schema(G6_SCS_SCHEMA,submitted_json):
return 'G6-SCS'
elif validates_against_schema(G6_SAAS_SCHEMA,submitted_json):
return 'G6-SaaS'
elif validates_against_schema(G6_PAAS_SCHEMA,submitted_json):
return 'G6-PaaS'
elif validates_against_schema(G6_IAAS_SCHEMA,submitted_json):
return 'G6-IaaS'
else:
print 'Failed validation'
return False
def validates_against_schema(schema, submitted_json):
try:
validate(submitted_json, schema)
except jsonschema.ValidationError:
return False
else:
return True
// ... rest of the code ...
|
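A stdlib-only sketch of the predicate pattern the record above refactors towards: wrap the exception-raising validator in a small boolean helper, then branch with plain if/elif instead of nested try/except. Here json.loads stands in for jsonschema.validate, and both function names are illustrative rather than taken from the original code.

import json

def parses_as_json(text):
    # Same shape as validates_against_schema(): turn an exception into a boolean.
    try:
        json.loads(text)
    except ValueError:
        return False
    else:
        return True

def classify(text):
    if parses_as_json(text):
        return 'json'
    else:
        return False

print(classify('{"a": 1}'), classify('not json'))  # json False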
844eaf8150c538fa76430bdade29f76f6ac5ba5b
|
integration-tests/src/test/java/arez/integration/dagger/codegen/AbstractCodegenIntegrationTest.java
|
integration-tests/src/test/java/arez/integration/dagger/codegen/AbstractCodegenIntegrationTest.java
|
package arez.integration.dagger.codegen;
import arez.integration.AbstractArezIntegrationTest;
import javax.annotation.Nonnull;
import static org.testng.Assert.*;
public class AbstractCodegenIntegrationTest
extends AbstractArezIntegrationTest
{
final void assertClassNotPresent( @Nonnull final String className )
{
assertThrows( ClassNotFoundException.class, () -> Class.forName( "arez.integration.dagger.codegen." + className ) );
}
final void assertClassPresent( @Nonnull final String className )
throws ClassNotFoundException
{
assertNotNull( Class.forName( "arez.integration.dagger.codegen." + className ) );
}
}
|
package arez.integration.dagger.codegen;
import arez.integration.AbstractArezIntegrationTest;
import javax.annotation.Nonnull;
import static org.testng.Assert.*;
public class AbstractCodegenIntegrationTest
extends AbstractArezIntegrationTest
{
protected final void assertClassNotPresent( @Nonnull final String className )
{
assertThrows( ClassNotFoundException.class, () -> Class.forName( getPackagePrefix() + className ) );
}
protected final void assertClassPresent( @Nonnull final String className )
throws ClassNotFoundException
{
assertNotNull( Class.forName( getPackagePrefix() + className ) );
}
@Nonnull
private String getPackagePrefix()
{
return getClass().getCanonicalName().replaceAll( "^(.*\\.)[^.]+$", "$1" );
}
}
|
Make assertions work in other packages
|
Make assertions work in other packages
|
Java
|
apache-2.0
|
realityforge/arez,realityforge/arez,realityforge/arez
|
java
|
## Code Before:
package arez.integration.dagger.codegen;
import arez.integration.AbstractArezIntegrationTest;
import javax.annotation.Nonnull;
import static org.testng.Assert.*;
public class AbstractCodegenIntegrationTest
extends AbstractArezIntegrationTest
{
final void assertClassNotPresent( @Nonnull final String className )
{
assertThrows( ClassNotFoundException.class, () -> Class.forName( "arez.integration.dagger.codegen." + className ) );
}
final void assertClassPresent( @Nonnull final String className )
throws ClassNotFoundException
{
assertNotNull( Class.forName( "arez.integration.dagger.codegen." + className ) );
}
}
## Instruction:
Make assertions work in other packages
## Code After:
package arez.integration.dagger.codegen;
import arez.integration.AbstractArezIntegrationTest;
import javax.annotation.Nonnull;
import static org.testng.Assert.*;
public class AbstractCodegenIntegrationTest
extends AbstractArezIntegrationTest
{
protected final void assertClassNotPresent( @Nonnull final String className )
{
assertThrows( ClassNotFoundException.class, () -> Class.forName( getPackagePrefix() + className ) );
}
protected final void assertClassPresent( @Nonnull final String className )
throws ClassNotFoundException
{
assertNotNull( Class.forName( getPackagePrefix() + className ) );
}
@Nonnull
private String getPackagePrefix()
{
return getClass().getCanonicalName().replaceAll( "^(.*\\.)[^.]+$", "$1" );
}
}
|
...
public class AbstractCodegenIntegrationTest
extends AbstractArezIntegrationTest
{
protected final void assertClassNotPresent( @Nonnull final String className )
{
assertThrows( ClassNotFoundException.class, () -> Class.forName( getPackagePrefix() + className ) );
}
protected final void assertClassPresent( @Nonnull final String className )
throws ClassNotFoundException
{
assertNotNull( Class.forName( getPackagePrefix() + className ) );
}
@Nonnull
private String getPackagePrefix()
{
return getClass().getCanonicalName().replaceAll( "^(.*\\.)[^.]+$", "$1" );
}
}
...
|
f4adce54b573b7776cf3f56230821f982c16b49f
|
modules/helloworld/helloworld.py
|
modules/helloworld/helloworld.py
|
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
|
import time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
|
Add time.sleep(0.05) in test module
|
Add time.sleep(0.05) in test module
|
Python
|
mit
|
RickGray/cyberbot
|
python
|
## Code Before:
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
## Instruction:
Add time.sleep(0.05) in test module
## Code After:
import time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
|
# ... existing code ...
import time
def run(seed):
""" function to run
# ... modified code ...
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
...
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
...
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
# ... rest of the code ...
|
41beca23fff6eab718550d0ce8d22769653c3109
|
sauce_test/test_suite.py
|
sauce_test/test_suite.py
|
import unittest
import access_dvn
# This is a list of testFileName.testClass
def suite():
return unittest.TestSuite((\
unittest.makeSuite(access_dvn.AccessDVN),
))
if __name__ == "__main__":
result = unittest.TextTestRunner(verbosity=2).run(suite())
# sys.exit(not result.wasSuccessful())
|
import unittest
import access_dvn
import test_dataverse
import test_dataset
# This is a list of testFileName.testClass
def suite():
return unittest.TestSuite((\
unittest.makeSuite(access_dvn.AccessDVN),
unittest.makeSuite(test_dataverse.TestDataverseFunctions),
unittest.makeSuite(test_dataset.TestDatasetFunctions),
))
if __name__ == "__main__":
result = unittest.TextTestRunner(verbosity=2).run(suite())
# sys.exit(not result.wasSuccessful())
|
Update test suite to include dataverse and dataset tests.
|
Update test suite to include dataverse and dataset tests.
|
Python
|
apache-2.0
|
ekoi/DANS-DVN-4.6.1,ekoi/DANS-DVN-4.6.1,quarian/dataverse,leeper/dataverse-1,leeper/dataverse-1,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,quarian/dataverse,quarian/dataverse,leeper/dataverse-1,leeper/dataverse-1,JayanthyChengan/dataverse,JayanthyChengan/dataverse,quarian/dataverse,majorseitan/dataverse,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,ekoi/DANS-DVN-4.6.1,quarian/dataverse,majorseitan/dataverse,JayanthyChengan/dataverse,quarian/dataverse,majorseitan/dataverse,jacksonokuhn/dataverse,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,jacksonokuhn/dataverse,jacksonokuhn/dataverse,quarian/dataverse,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,jacksonokuhn/dataverse,leeper/dataverse-1,leeper/dataverse-1,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,jacksonokuhn/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,leeper/dataverse-1,majorseitan/dataverse,JayanthyChengan/dataverse,leeper/dataverse-1,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,bmckinney/dataverse-canonical,jacksonokuhn/dataverse,majorseitan/dataverse,bmckinney/dataverse-canonical,jacksonokuhn/dataverse,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,quarian/dataverse
|
python
|
## Code Before:
import unittest
import access_dvn
# This is a list of testFileName.testClass
def suite():
return unittest.TestSuite((\
unittest.makeSuite(access_dvn.AccessDVN),
))
if __name__ == "__main__":
result = unittest.TextTestRunner(verbosity=2).run(suite())
# sys.exit(not result.wasSuccessful())
## Instruction:
Update test suite to include dataverse and dataset tests.
## Code After:
import unittest
import access_dvn
import test_dataverse
import test_dataset
# This is a list of testFileName.testClass
def suite():
return unittest.TestSuite((\
unittest.makeSuite(access_dvn.AccessDVN),
unittest.makeSuite(test_dataverse.TestDataverseFunctions),
unittest.makeSuite(test_dataset.TestDatasetFunctions),
))
if __name__ == "__main__":
result = unittest.TextTestRunner(verbosity=2).run(suite())
# sys.exit(not result.wasSuccessful())
|
// ... existing code ...
import unittest
import access_dvn
import test_dataverse
import test_dataset
# This is a list of testFileName.testClass
def suite():
return unittest.TestSuite((\
unittest.makeSuite(access_dvn.AccessDVN),
unittest.makeSuite(test_dataverse.TestDataverseFunctions),
unittest.makeSuite(test_dataset.TestDatasetFunctions),
))
if __name__ == "__main__":
// ... rest of the code ...
|
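A self-contained sketch of the suite-building pattern the record above extends, with two throwaway TestCase classes standing in for the imported test modules; TestLoader.loadTestsFromTestCase is used here because it is the currently supported equivalent of unittest.makeSuite, which newer Python versions deprecate.

import unittest

class TestA(unittest.TestCase):
    def test_ok(self):
        self.assertTrue(True)

class TestB(unittest.TestCase):
    def test_sum(self):
        self.assertEqual(1 + 1, 2)

def suite():
    loader = unittest.TestLoader()
    # One entry per test class, mirroring the record's list of makeSuite() calls.
    return unittest.TestSuite((
        loader.loadTestsFromTestCase(TestA),
        loader.loadTestsFromTestCase(TestB),
    ))

if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(suite())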
8db9c583f92f65d03d7688d5b65cecf7a906d918
|
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/akka/osgi/impl/QuarantinedMonitorActorPropsFactory.java
|
opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/akka/osgi/impl/QuarantinedMonitorActorPropsFactory.java
|
/*
* Copyright (c) 2017 Pantheon Technologies s.r.o. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.cluster.akka.osgi.impl;
import akka.actor.Props;
import org.opendaylight.controller.cluster.common.actor.QuarantinedMonitorActor;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class QuarantinedMonitorActorPropsFactory {
private static final Logger LOG = LoggerFactory.getLogger(QuarantinedMonitorActorPropsFactory.class);
private QuarantinedMonitorActorPropsFactory() {
}
public static Props createProps(final BundleContext bundleContext) {
return QuarantinedMonitorActor.props(() -> {
// restart the entire karaf container
LOG.warn("Restarting karaf container");
System.setProperty("karaf.restart.jvm", "true");
bundleContext.getBundle(0).stop();
});
}
}
|
/*
* Copyright (c) 2017 Pantheon Technologies s.r.o. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.cluster.akka.osgi.impl;
import akka.actor.Props;
import org.opendaylight.controller.cluster.common.actor.QuarantinedMonitorActor;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class QuarantinedMonitorActorPropsFactory {
private static final Logger LOG = LoggerFactory.getLogger(QuarantinedMonitorActorPropsFactory.class);
private QuarantinedMonitorActorPropsFactory() {
}
public static Props createProps(final BundleContext bundleContext) {
return QuarantinedMonitorActor.props(() -> {
// restart the entire karaf container
LOG.warn("Restarting karaf container");
System.setProperty("karaf.restart.jvm", "true");
System.setProperty("karaf.restart", "true");
bundleContext.getBundle(0).stop();
});
}
}
|
Update procedure to restart controller on quarantined condition.
|
Update procedure to restart controller on quarantined condition.
There was a behavior change in Karaf [0] because of which restarting
the container now requires the system property karaf.restart to be
set to true in addition to karaf.restart.jvm property. Update
controller restart logic on quarantined condition for the same.
[0] https://issues.apache.org/jira/browse/KARAF-5179
Change-Id: I7b93eb87f53870efea70f2c9a9b82eeca783aa0b
Signed-off-by: Ajay Lele <[email protected]>
|
Java
|
epl-1.0
|
opendaylight/controller
|
java
|
## Code Before:
/*
* Copyright (c) 2017 Pantheon Technologies s.r.o. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.cluster.akka.osgi.impl;
import akka.actor.Props;
import org.opendaylight.controller.cluster.common.actor.QuarantinedMonitorActor;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class QuarantinedMonitorActorPropsFactory {
private static final Logger LOG = LoggerFactory.getLogger(QuarantinedMonitorActorPropsFactory.class);
private QuarantinedMonitorActorPropsFactory() {
}
public static Props createProps(final BundleContext bundleContext) {
return QuarantinedMonitorActor.props(() -> {
// restart the entire karaf container
LOG.warn("Restarting karaf container");
System.setProperty("karaf.restart.jvm", "true");
bundleContext.getBundle(0).stop();
});
}
}
## Instruction:
Update procedure to restart controller on quarantined condition.
There was a behavior change in Karaf [0] because of which restarting
the container now requires the system property karaf.restart to be
set to true in addition to karaf.restart.jvm property. Update
controller restart logic on quarantined condition for the same.
[0] https://issues.apache.org/jira/browse/KARAF-5179
Change-Id: I7b93eb87f53870efea70f2c9a9b82eeca783aa0b
Signed-off-by: Ajay Lele <[email protected]>
## Code After:
/*
* Copyright (c) 2017 Pantheon Technologies s.r.o. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.cluster.akka.osgi.impl;
import akka.actor.Props;
import org.opendaylight.controller.cluster.common.actor.QuarantinedMonitorActor;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class QuarantinedMonitorActorPropsFactory {
private static final Logger LOG = LoggerFactory.getLogger(QuarantinedMonitorActorPropsFactory.class);
private QuarantinedMonitorActorPropsFactory() {
}
public static Props createProps(final BundleContext bundleContext) {
return QuarantinedMonitorActor.props(() -> {
// restart the entire karaf container
LOG.warn("Restarting karaf container");
System.setProperty("karaf.restart.jvm", "true");
System.setProperty("karaf.restart", "true");
bundleContext.getBundle(0).stop();
});
}
}
|
...
// restart the entire karaf container
LOG.warn("Restarting karaf container");
System.setProperty("karaf.restart.jvm", "true");
System.setProperty("karaf.restart", "true");
bundleContext.getBundle(0).stop();
});
}
...
|
094c89122f19f1acecd099c42c4e8ce9a1916d0c
|
src/main/java/org/jboss/rhiot/services/api/IRHIoTTagScanner.java
|
src/main/java/org/jboss/rhiot/services/api/IRHIoTTagScanner.java
|
package org.jboss.rhiot.services.api;
/**
* Created by sstark on 5/25/16.
*/
public interface IRHIoTTagScanner {
}
|
package org.jboss.rhiot.services.api;
/**
* Created by sstark on 5/25/16.
*/
public interface IRHIoTTagScanner {
// Metrics keys
// Set 1, the tag data sent on every advertisement event
static final String TAG_TEMP = "rhiotTag.temperature";
static final String TAG_KEYS = "rhiotTag.keys";
static final String TAG_LUX = "rhiotTag.lux";
// Set 2, the game state information sent on an event such as a key press or state timeout
static final String TAG_PREV_STATE = "rhiotTag.prevState";
static final String TAG_NEW_STATE = "rhiotTag.newState";
static final String TAG_EVENT = "rhiotTag.event";
// Set 3, the game progress information sent while the game is active.
static final String TAG_GAME_TIME_LEFT = "rhiotTag.gameTimeLeft";
static final String TAG_GAME_SCORE = "rhiotTag.gameScore";
static final String TAG_SHOOTING_TIME_LEFT = "rhiotTag.shootingTimeLeft";
static final String TAG_SHOTS_LEFT = "rhiotTag.shotsLeft";
// Set 4, the information about a hit on the light sensor sent when a sensor reading above a threshold value is detected
static final String TAG_HIT_SCORE = "rhiotTag.hitScore";
static final String TAG_HIT_RINGS_OFF_CENTER = "rhiotTag.hitRingsOffCenter";
// Set 5, the information about the game scores sent on each game end
static final String GW_LAST_GAME_SCORE = "rhiotTagGW.score";
static final String GW_LAST_GAME_SCORE_HITS = "rhiotTagGW.hits";
static final String GW_LAST_GAME_SCORE_TAG_ADDRESS = "rhiotTagGW.scoreTagAddress";
// Set 6, the information about the high game scores sent on game end when a new high score is achieved
static final String GW_HIGH_GAME_SCORE = "rhiotTagGW.highScore";
static final String GW_HIGH_GAME_SCORE_HITS = "rhiotTagGW.highScoreHits";
static final String GW_HIGH_GAME_SCORE_TAG_ADDRESS = "rhiotTagGame.highScoreTagAddress";
// REST endpoints
String CLOUD_PW_PATH = "/cloud-password";
String TAG_INFO_PATH = "/tags";
String GAMESM_DIGRAPH_PATH = "/gamesm-digraph";
String GAMESM_INFO_PATH = "/gamesm";
}
|
Add constants for the msg metrics and rest endpoints
|
Add constants for the msg metrics and rest endpoints
|
Java
|
apache-2.0
|
RHioTResearch/RHIoTTagServices
|
java
|
## Code Before:
package org.jboss.rhiot.services.api;
/**
* Created by sstark on 5/25/16.
*/
public interface IRHIoTTagScanner {
}
## Instruction:
Add constants for the msg metrics and rest endpoints
## Code After:
package org.jboss.rhiot.services.api;
/**
* Created by sstark on 5/25/16.
*/
public interface IRHIoTTagScanner {
// Metrics keys
// Set 1, the tag data sent on every advertisement event
static final String TAG_TEMP = "rhiotTag.temperature";
static final String TAG_KEYS = "rhiotTag.keys";
static final String TAG_LUX = "rhiotTag.lux";
// Set 2, the game state information sent on an event such as a key press or state timeout
static final String TAG_PREV_STATE = "rhiotTag.prevState";
static final String TAG_NEW_STATE = "rhiotTag.newState";
static final String TAG_EVENT = "rhiotTag.event";
// Set 3, the game progress information sent while the game is active.
static final String TAG_GAME_TIME_LEFT = "rhiotTag.gameTimeLeft";
static final String TAG_GAME_SCORE = "rhiotTag.gameScore";
static final String TAG_SHOOTING_TIME_LEFT = "rhiotTag.shootingTimeLeft";
static final String TAG_SHOTS_LEFT = "rhiotTag.shotsLeft";
// Set 4, the information about a hit on the light sensor sent when a sensor reading above a threshold value is detected
static final String TAG_HIT_SCORE = "rhiotTag.hitScore";
static final String TAG_HIT_RINGS_OFF_CENTER = "rhiotTag.hitRingsOffCenter";
// Set 5, the information about the game scores sent on each game end
static final String GW_LAST_GAME_SCORE = "rhiotTagGW.score";
static final String GW_LAST_GAME_SCORE_HITS = "rhiotTagGW.hits";
static final String GW_LAST_GAME_SCORE_TAG_ADDRESS = "rhiotTagGW.scoreTagAddress";
// Set 6, the information about the high game scores sent on game end when a new high score is achieved
static final String GW_HIGH_GAME_SCORE = "rhiotTagGW.highScore";
static final String GW_HIGH_GAME_SCORE_HITS = "rhiotTagGW.highScoreHits";
static final String GW_HIGH_GAME_SCORE_TAG_ADDRESS = "rhiotTagGame.highScoreTagAddress";
// REST endpoints
String CLOUD_PW_PATH = "/cloud-password";
String TAG_INFO_PATH = "/tags";
String GAMESM_DIGRAPH_PATH = "/gamesm-digraph";
String GAMESM_INFO_PATH = "/gamesm";
}
|
...
* Created by sstark on 5/25/16.
*/
public interface IRHIoTTagScanner {
// Metrics keys
// Set 1, the tag data sent on every advertisement event
static final String TAG_TEMP = "rhiotTag.temperature";
static final String TAG_KEYS = "rhiotTag.keys";
static final String TAG_LUX = "rhiotTag.lux";
// Set 2, the game state information sent on an event such as a key press or state timeout
static final String TAG_PREV_STATE = "rhiotTag.prevState";
static final String TAG_NEW_STATE = "rhiotTag.newState";
static final String TAG_EVENT = "rhiotTag.event";
// Set 3, the game progress information sent while the game is active.
static final String TAG_GAME_TIME_LEFT = "rhiotTag.gameTimeLeft";
static final String TAG_GAME_SCORE = "rhiotTag.gameScore";
static final String TAG_SHOOTING_TIME_LEFT = "rhiotTag.shootingTimeLeft";
static final String TAG_SHOTS_LEFT = "rhiotTag.shotsLeft";
// Set 4, the information about a hit on the light sensor sent when a sensor reading above a threshold value is detected
static final String TAG_HIT_SCORE = "rhiotTag.hitScore";
static final String TAG_HIT_RINGS_OFF_CENTER = "rhiotTag.hitRingsOffCenter";
// Set 5, the information about the game scores sent on each game end
static final String GW_LAST_GAME_SCORE = "rhiotTagGW.score";
static final String GW_LAST_GAME_SCORE_HITS = "rhiotTagGW.hits";
static final String GW_LAST_GAME_SCORE_TAG_ADDRESS = "rhiotTagGW.scoreTagAddress";
// Set 6, the information about the high game scores sent on game end when a new high score is achieved
static final String GW_HIGH_GAME_SCORE = "rhiotTagGW.highScore";
static final String GW_HIGH_GAME_SCORE_HITS = "rhiotTagGW.highScoreHits";
static final String GW_HIGH_GAME_SCORE_TAG_ADDRESS = "rhiotTagGame.highScoreTagAddress";
// REST endpoints
String CLOUD_PW_PATH = "/cloud-password";
String TAG_INFO_PATH = "/tags";
String GAMESM_DIGRAPH_PATH = "/gamesm-digraph";
String GAMESM_INFO_PATH = "/gamesm";
}
...
|
82cbe36e00f2a363c1d613b1aa0ffc5f7550adc1
|
main.py
|
main.py
|
import numpy as np
import pandas as pd
from transform import transform
# Load the questions
questions = pd.read_csv('questions.csv')
# Initialise the position of the user at the origin
pos = np.zeros(3)
input_text = 'Enter response from -2 (strongly disagree) to +2 (strongly agree): '
# Using a C-style loop over questions without apology
for i in range(0, questions.shape[0]):
# Check the question satisfies a basic sanity check
norm = np.linalg.norm(questions.iloc[i, 1:])
if norm > 2.:
print('# WARNING: Very influential question.')
elif norm < 0.5:
print('# WARNING: Very uninfluential question.')
# Print the question
print('\nQuestion {k}/{n}:\n'.format(k=i+1, n=questions.shape[0]))
print(questions.iloc[i, 0] + '\n')
# Get the user's response
response = None # Placeholder value
while response < -2. or response > 2.:
response = input(input_text)
# Increment the user's position
pos += response*questions.iloc[i, 1:].values
# Apply some scaling to the position based on how far it was possible
# to move in each dimension
print(pos)
pos = transform(pos, questions)[0]
print('Your position in 3D is ' + str(pos) + '.')
|
import numpy as np
import pandas as pd
from transform import transform
# Load the questions
questions = pd.read_csv('questions.csv')
# Initialise the position of the user at the origin
pos = np.zeros(3)
input_text = 'Enter response from -2 (strongly disagree) to +2 (strongly agree): '
# Using a C-style loop over questions without apology
for i in range(0, questions.shape[0]):
# Check the question satisfies a basic sanity check
norm = np.linalg.norm(questions.iloc[i, 1:])
if norm > 2.:
print('# WARNING: Very influential question.')
elif norm < 0.5:
print('# WARNING: Very uninfluential question.')
# Print the question
print('\nQuestion {k}/{n}:\n'.format(k=i+1, n=questions.shape[0]))
print(questions.iloc[i, 0] + '\n')
# Get the user's response
response = None # Placeholder value
while response is None or response < -2. or response > 2.:
response = float(input(input_text))
# Increment the user's position
pos += response*questions.iloc[i, 1:].values
# Apply some scaling to the position based on how far it was possible
# to move in each dimension
print(pos)
pos = transform(pos, questions)[0]
print('Your position in 3D is ' + str(pos) + '.')
|
Correct for older Python3 version errors
|
Correct for older Python3 version errors
|
Python
|
mit
|
eggplantbren/StatisticalCompass,eggplantbren/StatisticalCompass,eggplantbren/StatisticalCompass
|
python
|
## Code Before:
import numpy as np
import pandas as pd
from transform import transform
# Load the questions
questions = pd.read_csv('questions.csv')
# Initialise the position of the user at the origin
pos = np.zeros(3)
input_text = 'Enter response from -2 (strongly disagree) to +2 (strongly agree): '
# Using a C-style loop over questions without apology
for i in range(0, questions.shape[0]):
# Check the question satisfies a basic sanity check
norm = np.linalg.norm(questions.iloc[i, 1:])
if norm > 2.:
print('# WARNING: Very influential question.')
elif norm < 0.5:
print('# WARNING: Very uninfluential question.')
# Print the question
print('\nQuestion {k}/{n}:\n'.format(k=i+1, n=questions.shape[0]))
print(questions.iloc[i, 0] + '\n')
# Get the user's response
response = None # Placeholder value
while response < -2. or response > 2.:
response = input(input_text)
# Increment the user's position
pos += response*questions.iloc[i, 1:].values
# Apply some scaling to the position based on how far it was possible
# to move in each dimension
print(pos)
pos = transform(pos, questions)[0]
print('Your position in 3D is ' + str(pos) + '.')
## Instruction:
Correct for older Python3 version errors
## Code After:
import numpy as np
import pandas as pd
from transform import transform
# Load the questions
questions = pd.read_csv('questions.csv')
# Initialise the position of the user at the origin
pos = np.zeros(3)
input_text = 'Enter response from -2 (strongly disagree) to +2 (strongly agree): '
# Using a C-style loop over questions without apology
for i in range(0, questions.shape[0]):
# Check the question satisfies a basic sanity check
norm = np.linalg.norm(questions.iloc[i, 1:])
if norm > 2.:
print('# WARNING: Very influential question.')
elif norm < 0.5:
print('# WARNING: Very uninfluential question.')
# Print the question
print('\nQuestion {k}/{n}:\n'.format(k=i+1, n=questions.shape[0]))
print(questions.iloc[i, 0] + '\n')
# Get the user's response
response = None # Placeholder value
while response is None or response < -2. or response > 2.:
response = float(input(input_text))
# Increment the user's position
pos += response*questions.iloc[i, 1:].values
# Apply some scaling to the position based on how far it was possible
# to move in each dimension
print(pos)
pos = transform(pos, questions)[0]
print('Your position in 3D is ' + str(pos) + '.')
|
# ... existing code ...
# Get the user's response
response = None # Placeholder value
while response is None or response < -2. or response > 2.:
response = float(input(input_text))
# Increment the user's position
pos += response*questions.iloc[i, 1:].values
# ... rest of the code ...
|
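A hedged aside on the change above (illustrative snippet, not part of the repository): on Python 3, input() returns a string, and comparing None or a string against a float raises TypeError, which is why the loop seeds response with None, guards on it, and converts with float() before the range check.
import sys  # only to note this behaviour is specific to Python 3
response = None
try:
    response < -2.            # TypeError on Python 3: NoneType vs. float
except TypeError as exc:
    print(exc)
response = float("1.5")       # what float(input(...)) yields for the text "1.5"
print(-2. <= response <= 2.)  # True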
cc62a1eea746a7191b4a07a48dcf55f4c76787ee
|
asyncpg/__init__.py
|
asyncpg/__init__.py
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
|
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
Use loop.create_future if it exists
|
Use loop.create_future if it exists
|
Python
|
apache-2.0
|
MagicStack/asyncpg,MagicStack/asyncpg
|
python
|
## Code Before:
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = asyncio.Future(loop=self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = asyncio.Future(loop=self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
## Instruction:
Use loop.create_future if it exists
## Code After:
import asyncio
from .protocol import Protocol
__all__ = ('connect',)
class Connection:
def __init__(self, protocol, transport, loop):
self._protocol = protocol
self._transport = transport
self._loop = loop
def get_settings(self):
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
class PreparedStatement:
def __init__(self, connection, state):
self._connection = connection
self._state = state
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
async def connect(host='localhost', port=5432, user='postgres', *,
loop=None):
if loop is None:
loop = asyncio.get_event_loop()
connected = asyncio.Future(loop=loop)
tr, pr = await loop.create_connection(
lambda: Protocol(connected, user, loop),
host, port)
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
|
// ... existing code ...
return self._protocol.get_settings()
async def query(self, query):
waiter = _create_future(self._loop)
self._protocol.query(query, waiter)
return await waiter
async def prepare(self, query):
waiter = _create_future(self._loop)
self._protocol.prepare(None, query, waiter)
state = await waiter
return PreparedStatement(self, state)
// ... modified code ...
async def execute(self, *args):
protocol = self._connection._protocol
waiter = _create_future(self._connection._loop)
protocol.execute(self._state, args, waiter)
return await waiter
...
await connected
return Connection(pr, tr, loop)
def _create_future(loop):
try:
create_future = loop.create_future
except AttributeError:
return asyncio.Future(loop=loop)
else:
return create_future()
// ... rest of the code ...
|
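For context, a minimal standalone sketch of the fallback pattern used above (the event-loop setup here is illustrative and not part of the commit): loop.create_future() is preferred when the loop provides it, with asyncio.Future as the fallback for older loops.
import asyncio

def make_future(loop):
    try:
        # Newer event loops expose create_future(), which lets custom
        # loop implementations return optimized Future objects.
        return loop.create_future()
    except AttributeError:
        # Older loops: fall back to the plain asyncio.Future constructor.
        return asyncio.Future(loop=loop)

loop = asyncio.new_event_loop()
fut = make_future(loop)
fut.set_result(42)
print(loop.run_until_complete(fut))  # 42
loop.close()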
ef8a6616876ee044d07cf8f30b51af0cbb2bc7e4
|
geozones/factories.py
|
geozones/factories.py
|
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
Fix region factory to reflect region model
|
Fix region factory to reflect region model
|
Python
|
mit
|
sarutobi/Rynda,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa
|
python
|
## Code Before:
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
## Instruction:
Fix region factory to reflect region model
## Code After:
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
...
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
...
|
a65eaeaef60492bfc6319fb9c810155d62c1a3b3
|
luigi/tasks/export/ftp/go_annotations.py
|
luigi/tasks/export/ftp/go_annotations.py
|
import luigi
from tasks.config import db
from tasks.config import export
from tasks.utils.files import atomic_output
class GoAnnotation(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
export(db(), out)
|
import luigi
from tasks.config import db
from tasks.config import export
from rnacentral.export.ftp import go_terms
from tasks.utils.files import atomic_output
class GoAnnotationExport(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
go_terms.export(db(), out)
|
Update name and call correct export
|
Update name and call correct export
This now calls the correct export function. Additionally, the class name
is changed to reflect it does export.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
python
|
## Code Before:
import luigi
from tasks.config import db
from tasks.config import export
from tasks.utils.files import atomic_output
class GoAnnotation(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
export(db(), out)
## Instruction:
Update name and call correct export
This now calls the correct export function. Additionally, the class name
is changed to reflect it does export.
## Code After:
import luigi
from tasks.config import db
from tasks.config import export
from rnacentral.export.ftp import go_terms
from tasks.utils.files import atomic_output
class GoAnnotationExport(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
go_terms.export(db(), out)
|
// ... existing code ...
from tasks.config import db
from tasks.config import export
from rnacentral.export.ftp import go_terms
from tasks.utils.files import atomic_output
class GoAnnotationExport(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
go_terms.export(db(), out)
// ... rest of the code ...
|
f061499b9d415b7471edf072c81b93ce5453494d
|
githubtrending/utils.py
|
githubtrending/utils.py
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
Refactor get_console_size to close file after reading
|
Utils: Refactor get_console_size to close file after reading
|
Python
|
mit
|
staranjeet/github-trending-cli
|
python
|
## Code Before:
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
return map(int, os.popen('stty size', 'r').read().split())
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
## Instruction:
Utils: Refactor get_console_size to close file after reading
## Code After:
import os
def get_console_size():
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
for each in data:
repo_name, desc, [stars, language] = each
name = max(len(repo_name), name)
lang = max(len(language), lang)
star = max(len(stars), star)
return {
"NAME": name+1,
"LANG": lang+1,
"STAR": star+1,
"IDX": 3,
}
def get_print_size_for_dev(data):
dev, repo = [0]*2
for each in data:
dev_name, repo_name, description = each
dev = max(len(dev_name), dev)
repo = max(len(repo_name), repo)
return {
"DEV": dev+1,
"REPO": repo+1,
"IDX": 3,
}
def get_color_code():
return {
"IDX": "white",
"NAME": "yellow",
"LANG": "red",
"STARS": "green",
"DESC": "blue",
"REPO": "green",
}
|
// ... existing code ...
'''
returns no of rows, no of cols
'''
with os.popen('stty size', 'r') as f:
size = map(int, f.read().split())
return size
def get_print_size_for_repo(data):
name, lang, star = [0]*3
// ... rest of the code ...
|
0cdb7a0baa6e4f00b3b54cb49701175cdb3c8a05
|
entities/filters.py
|
entities/filters.py
|
from . import forms
import django_filters as filters
class Group(filters.FilterSet):
name = filters.CharFilter(lookup_expr='icontains')
class Meta:
form = forms.GroupFilter
|
from . import forms
import django_filters as filters
from features.groups import models
class Group(filters.FilterSet):
name = filters.CharFilter(label='Name', lookup_expr='icontains')
class Meta:
model = models.Group
fields = ['name']
form = forms.GroupFilter
|
Fix filter for django-filter 1.0
|
Fix filter for django-filter 1.0
|
Python
|
agpl-3.0
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
python
|
## Code Before:
from . import forms
import django_filters as filters
class Group(filters.FilterSet):
name = filters.CharFilter(lookup_expr='icontains')
class Meta:
form = forms.GroupFilter
## Instruction:
Fix filter for django-filter 1.0
## Code After:
from . import forms
import django_filters as filters
from features.groups import models
class Group(filters.FilterSet):
name = filters.CharFilter(label='Name', lookup_expr='icontains')
class Meta:
model = models.Group
fields = ['name']
form = forms.GroupFilter
|
// ... existing code ...
from . import forms
import django_filters as filters
from features.groups import models
class Group(filters.FilterSet):
name = filters.CharFilter(label='Name', lookup_expr='icontains')
class Meta:
model = models.Group
fields = ['name']
form = forms.GroupFilter
// ... rest of the code ...
|
7c0c5631ff9f2d3511b7c460d22516b5b0393697
|
setup.py
|
setup.py
|
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.1'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
packages=['linersock'],
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'])
|
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.2'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'],
packages=['linersock'],
install_requires=[
'six',
],
)
|
Add six as a dependency.
|
Add six as a dependency.
|
Python
|
mit
|
kalekundert/linersock,kalekundert/linersock
|
python
|
## Code Before:
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.1'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
packages=['linersock'],
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'])
## Instruction:
Add six as a dependency.
## Code After:
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.2'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'],
packages=['linersock'],
install_requires=[
'six',
],
)
|
...
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.2'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'],
packages=['linersock'],
install_requires=[
'six',
],
)
...
|
777ed567d43f6a3c9bbee376e9e1a4b9244f9bce
|
src/main/java/de/retest/recheck/ui/descriptors/AttributeUtil.java
|
src/main/java/de/retest/recheck/ui/descriptors/AttributeUtil.java
|
package de.retest.recheck.ui.descriptors;
import java.awt.Rectangle;
public class AttributeUtil {
public static Rectangle getOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.RELATIVE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
public static Rectangle getAbsoluteOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.ABSOLUTE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
}
|
package de.retest.recheck.ui.descriptors;
import java.awt.Rectangle;
import java.util.List;
import de.retest.recheck.ui.diff.AttributeDifference;
import de.retest.recheck.ui.diff.ElementDifference;
import de.retest.recheck.ui.diff.IdentifyingAttributesDifference;
public class AttributeUtil {
public static Rectangle getActualOutline( final ElementDifference difference ) {
final Rectangle actualRelative = getActualOutline( difference, OutlineAttribute.RELATIVE_OUTLINE );
if ( actualRelative != null ) {
return actualRelative;
}
return getOutline( difference.getIdentifyingAttributes() );
}
public static Rectangle getActualAbsoluteOutline( final ElementDifference difference ) {
final Rectangle actualAbsolute = getActualOutline( difference, OutlineAttribute.ABSOLUTE_OUTLINE );
if ( actualAbsolute != null ) {
return actualAbsolute;
}
return getAbsoluteOutline( difference.getIdentifyingAttributes() );
}
public static Rectangle getOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.RELATIVE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
public static Rectangle getAbsoluteOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.ABSOLUTE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
private static Rectangle getActualOutline( final ElementDifference difference, final String type ) {
final IdentifyingAttributesDifference identifyingAttributesDifference =
(IdentifyingAttributesDifference) difference.getIdentifyingAttributesDifference();
if ( identifyingAttributesDifference != null ) {
final List<AttributeDifference> attributeDifferences =
identifyingAttributesDifference.getAttributeDifferences();
if ( attributeDifferences != null ) {
for ( final AttributeDifference aDiff : attributeDifferences ) {
if ( aDiff.getKey().equals( type ) ) {
return ((Rectangle) aDiff.getActual());
}
}
}
}
return null;
}
}
|
Add methods for actual outline
|
Add methods for actual outline
|
Java
|
agpl-3.0
|
retest/recheck,retest/recheck
|
java
|
## Code Before:
package de.retest.recheck.ui.descriptors;
import java.awt.Rectangle;
public class AttributeUtil {
public static Rectangle getOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.RELATIVE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
public static Rectangle getAbsoluteOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.ABSOLUTE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
}
## Instruction:
Add methods for actual outline
## Code After:
package de.retest.recheck.ui.descriptors;
import java.awt.Rectangle;
import java.util.List;
import de.retest.recheck.ui.diff.AttributeDifference;
import de.retest.recheck.ui.diff.ElementDifference;
import de.retest.recheck.ui.diff.IdentifyingAttributesDifference;
public class AttributeUtil {
public static Rectangle getActualOutline( final ElementDifference difference ) {
final Rectangle actualRelative = getActualOutline( difference, OutlineAttribute.RELATIVE_OUTLINE );
if ( actualRelative != null ) {
return actualRelative;
}
return getOutline( difference.getIdentifyingAttributes() );
}
public static Rectangle getActualAbsoluteOutline( final ElementDifference difference ) {
final Rectangle actualAbsolute = getActualOutline( difference, OutlineAttribute.ABSOLUTE_OUTLINE );
if ( actualAbsolute != null ) {
return actualAbsolute;
}
return getAbsoluteOutline( difference.getIdentifyingAttributes() );
}
public static Rectangle getOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.RELATIVE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
public static Rectangle getAbsoluteOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
(OutlineAttribute) attributes.getAttribute( OutlineAttribute.ABSOLUTE_OUTLINE );
if ( outlineAttribute == null ) {
return null;
}
return outlineAttribute.getValue();
}
private static Rectangle getActualOutline( final ElementDifference difference, final String type ) {
final IdentifyingAttributesDifference identifyingAttributesDifference =
(IdentifyingAttributesDifference) difference.getIdentifyingAttributesDifference();
if ( identifyingAttributesDifference != null ) {
final List<AttributeDifference> attributeDifferences =
identifyingAttributesDifference.getAttributeDifferences();
if ( attributeDifferences != null ) {
for ( final AttributeDifference aDiff : attributeDifferences ) {
if ( aDiff.getKey().equals( type ) ) {
return ((Rectangle) aDiff.getActual());
}
}
}
}
return null;
}
}
|
// ... existing code ...
package de.retest.recheck.ui.descriptors;
import java.awt.Rectangle;
import java.util.List;
import de.retest.recheck.ui.diff.AttributeDifference;
import de.retest.recheck.ui.diff.ElementDifference;
import de.retest.recheck.ui.diff.IdentifyingAttributesDifference;
public class AttributeUtil {
public static Rectangle getActualOutline( final ElementDifference difference ) {
final Rectangle actualRelative = getActualOutline( difference, OutlineAttribute.RELATIVE_OUTLINE );
if ( actualRelative != null ) {
return actualRelative;
}
return getOutline( difference.getIdentifyingAttributes() );
}
public static Rectangle getActualAbsoluteOutline( final ElementDifference difference ) {
final Rectangle actualAbsolute = getActualOutline( difference, OutlineAttribute.ABSOLUTE_OUTLINE );
if ( actualAbsolute != null ) {
return actualAbsolute;
}
return getAbsoluteOutline( difference.getIdentifyingAttributes() );
}
public static Rectangle getOutline( final IdentifyingAttributes attributes ) {
final OutlineAttribute outlineAttribute =
// ... modified code ...
}
return outlineAttribute.getValue();
}
private static Rectangle getActualOutline( final ElementDifference difference, final String type ) {
final IdentifyingAttributesDifference identifyingAttributesDifference =
(IdentifyingAttributesDifference) difference.getIdentifyingAttributesDifference();
if ( identifyingAttributesDifference != null ) {
final List<AttributeDifference> attributeDifferences =
identifyingAttributesDifference.getAttributeDifferences();
if ( attributeDifferences != null ) {
for ( final AttributeDifference aDiff : attributeDifferences ) {
if ( aDiff.getKey().equals( type ) ) {
return ((Rectangle) aDiff.getActual());
}
}
}
}
return null;
}
}
// ... rest of the code ...
|
84990a4ef20c2e0f42133ed06ade5ce2d4e98ae3
|
chmvh_website/team/models.py
|
chmvh_website/team/models.py
|
from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
Save team member picture with extension.
|
Save team member picture with extension.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
python
|
## Code Before:
from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
## Instruction:
Save team member picture with extension.
## Code After:
import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
# ... existing code ...
import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
# ... rest of the code ...
|
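A hypothetical illustration of the renamed upload path (the function below takes a plain name instead of the model instance, purely to keep the example self-contained): os.path.splitext() returns the extension with its leading dot, so the stored file keeps its original suffix.
import os

def team_member_image_name(name, filename):
    _, ext = os.path.splitext(filename)   # ext includes the leading dot, e.g. ".jpg"
    return 'team/{0}{1}'.format(name, ext)

print(team_member_image_name('Jane Doe', 'portrait.jpg'))  # team/Jane Doe.jpg
print(team_member_image_name('Jane Doe', 'portrait'))      # team/Jane Doe (no extension)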
3d5a16bf49523dd2f6c832829d127e809ca1fa68
|
framework/util/GuitarString.java
|
framework/util/GuitarString.java
|
package framework.util;
import framework.util.RingBuffer;
import framework.generators.Generator;
/**
* Models a guitar string using the Karplus-Strong algorithm.
*/
public class GuitarString {
/**
* Create a GuitarString of the given frequency.
* @param f the frequency
*/
public GuitarString(Double f) {
N = (int) (Generator.SAMPLE_RATE / f);
buffer = new RingBuffer(N);
// Guitar string at rest
for(int i = 0; i < N; i++)
buffer.enqueue(0.);
}
/**
* Models the plucking of the string.
*/
public void pluck() {
for(int i = 0; i < N; i++) {
// Enqueue random value between -0.5 and 0.5 (noise)
buffer.enqueue(Math.random() - 0.5);
}
}
/**
* Apply the Karplus-Strong update.
*/
public void tic() {
Double first = buffer.dequeue();
buffer.enqueue((first * buffer.peek()) * 0.5 * ENERGY_DECAY_FACTOR);
}
/**
* Get the value at the front of the buffer.
* @return the value at the front of the buffer
*/
public Double sample() {
return buffer.peek();
}
RingBuffer buffer;
int N;
final Double ENERGY_DECAY_FACTOR = 0.996;
}
|
package framework.util;
import framework.util.RingBuffer;
import framework.generators.Generator;
/**
* Models a guitar string using the Karplus-Strong algorithm.
*/
public class GuitarString {
/**
* Create a GuitarString of the given frequency.
* @param f the frequency
*/
public GuitarString(Double f) {
N = (int) (Generator.SAMPLE_RATE / f);
buffer = new RingBuffer(N);
// Guitar string at rest
for(int i = 0; i < N; i++)
buffer.enqueue(0.);
}
/**
* Models the plucking of the string.
*/
public void pluck() {
for(int i = 0; i < N; i++) {
// Enqueue random value between -0.5 and 0.5 (noise)
buffer.enqueue(Math.random() - 0.5);
}
}
/**
* Apply the Karplus-Strong update.
*/
public void tic() {
buffer.enqueue((buffer.dequeue() + buffer.peek()) / 2 * ENERGY_DECAY_FACTOR);
}
/**
* Get the value at the front of the buffer.
* @return the value at the front of the buffer
*/
public Double sample() {
return buffer.peek();
}
RingBuffer buffer;
int N;
final Double ENERGY_DECAY_FACTOR = 0.996;
}
|
Correct THE stupid mistake. Now we have a string!
|
Correct THE stupid mistake. Now we have a string!
|
Java
|
mit
|
remigourdon/sound-editors
|
java
|
## Code Before:
package framework.util;
import framework.util.RingBuffer;
import framework.generators.Generator;
/**
* Models a guitar string using the Karplus-Strong algorithm.
*/
public class GuitarString {
/**
* Create a GuitarString of the given frequency.
* @param f the frequency
*/
public GuitarString(Double f) {
N = (int) (Generator.SAMPLE_RATE / f);
buffer = new RingBuffer(N);
// Guitar string at rest
for(int i = 0; i < N; i++)
buffer.enqueue(0.);
}
/**
* Models the plucking of the string.
*/
public void pluck() {
for(int i = 0; i < N; i++) {
// Enqueue random value between -0.5 and 0.5 (noise)
buffer.enqueue(Math.random() - 0.5);
}
}
/**
* Apply the Karplus-Strong update.
*/
public void tic() {
Double first = buffer.dequeue();
buffer.enqueue((first * buffer.peek()) * 0.5 * ENERGY_DECAY_FACTOR);
}
/**
* Get the value at the front of the buffer.
* @return the value at the front of the buffer
*/
public Double sample() {
return buffer.peek();
}
RingBuffer buffer;
int N;
final Double ENERGY_DECAY_FACTOR = 0.996;
}
## Instruction:
Correct THE stupid mistake. Now we have a string!
## Code After:
package framework.util;
import framework.util.RingBuffer;
import framework.generators.Generator;
/**
* Models a guitar string using the Karplus-Strong algorithm.
*/
public class GuitarString {
/**
* Create a GuitarString of the given frequency.
* @param f the frequency
*/
public GuitarString(Double f) {
N = (int) (Generator.SAMPLE_RATE / f);
buffer = new RingBuffer(N);
// Guitar string at rest
for(int i = 0; i < N; i++)
buffer.enqueue(0.);
}
/**
* Models the plucking of the string.
*/
public void pluck() {
for(int i = 0; i < N; i++) {
// Enqueue random value between -0.5 and 0.5 (noise)
buffer.enqueue(Math.random() - 0.5);
}
}
/**
* Apply the Karplus-Strong update.
*/
public void tic() {
buffer.enqueue((buffer.dequeue() + buffer.peek()) / 2 * ENERGY_DECAY_FACTOR);
}
/**
* Get the value at the front of the buffer.
* @return the value at the front of the buffer
*/
public Double sample() {
return buffer.peek();
}
RingBuffer buffer;
int N;
final Double ENERGY_DECAY_FACTOR = 0.996;
}
|
...
* Apply the Karplus-Strong update.
*/
public void tic() {
buffer.enqueue((buffer.dequeue() + buffer.peek()) / 2 * ENERGY_DECAY_FACTOR);
}
/**
...
|
b6514217b53ea83a4f076cd7b2bc4297ab35cec4
|
src/main/java/org/techern/dbsg/DyedBlockStateGenerator.java
|
src/main/java/org/techern/dbsg/DyedBlockStateGenerator.java
|
package org.techern.dbsg;
import java.util.logging.Logger;
/**
* DyedBlockStateGenerator; A block state generator for dyed blocks
*
* @since 0.0.1
*/
public class DyedBlockStateGenerator {
/**
* The {@link Logger} used by {@link DyedBlockStateGenerator}
*
* @since 0.0.1
*/
public static Logger LOGGER = Logger.getLogger("Generator");
/**
* Runs {@link DyedBlockStateGenerator}
*
* @param arguments The list of arguments
* @since 0.0.1
*/
public static void main(String... arguments) {
LOGGER.info("Starting the dyed block state generator...");
}
}
|
package org.techern.dbsg;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.logging.Logger;
/**
* DyedBlockStateGenerator; A block state generator for dyed blocks
*
* @since 0.0.1
*/
public class DyedBlockStateGenerator {
/**
* The {@link Logger} used by {@link DyedBlockStateGenerator}
*
* @since 0.0.1
*/
public static Logger LOGGER = Logger.getLogger("Generator");
/**
* Runs {@link DyedBlockStateGenerator}
*
* @param arguments The list of arguments
* @since 0.0.1
*/
public static void main(String... arguments) throws IOException {
LOGGER.info("Starting the dyed block state generator...");
Path rootPath = FileSystems.getDefault().getPath(".");
Path templatePath = rootPath.resolve("templates");
Path outputPath = rootPath.resolve("generated");
LOGGER.info("Template path is " + templatePath.toString() + ", output path is " + outputPath.toString());
if (Files.notExists(templatePath)) {
Files.createDirectory(templatePath);
LOGGER.warning("Template folder does not exist; Creating");
}
if (Files.notExists(outputPath)) {
Files.createDirectory(outputPath);
LOGGER.warning("Output folder does not exist; Creating");
}
}
}
|
Check for and create directories
|
Check for and create directories
|
Java
|
mit
|
Techern/Dyed-block-state-generator
|
java
|
## Code Before:
package org.techern.dbsg;
import java.util.logging.Logger;
/**
* DyedBlockStateGenerator; A block state generator for dyed blocks
*
* @since 0.0.1
*/
public class DyedBlockStateGenerator {
/**
* The {@link Logger} used by {@link DyedBlockStateGenerator}
*
* @since 0.0.1
*/
public static Logger LOGGER = Logger.getLogger("Generator");
/**
* Runs {@link DyedBlockStateGenerator}
*
* @param arguments The list of arguments
* @since 0.0.1
*/
public static void main(String... arguments) {
LOGGER.info("Starting the dyed block state generator...");
}
}
## Instruction:
Check for and create directories
## Code After:
package org.techern.dbsg;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.logging.Logger;
/**
* DyedBlockStateGenerator; A block state generator for dyed blocks
*
* @since 0.0.1
*/
public class DyedBlockStateGenerator {
/**
* The {@link Logger} used by {@link DyedBlockStateGenerator}
*
* @since 0.0.1
*/
public static Logger LOGGER = Logger.getLogger("Generator");
/**
* Runs {@link DyedBlockStateGenerator}
*
* @param arguments The list of arguments
* @since 0.0.1
*/
public static void main(String... arguments) throws IOException {
LOGGER.info("Starting the dyed block state generator...");
Path rootPath = FileSystems.getDefault().getPath(".");
Path templatePath = rootPath.resolve("templates");
Path outputPath = rootPath.resolve("generated");
LOGGER.info("Template path is " + templatePath.toString() + ", output path is " + outputPath.toString());
if (Files.notExists(templatePath)) {
Files.createDirectory(templatePath);
LOGGER.warning("Template folder does not exist; Creating");
}
if (Files.notExists(outputPath)) {
Files.createDirectory(outputPath);
LOGGER.warning("Output folder does not exist; Creating");
}
}
}
|
...
package org.techern.dbsg;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.logging.Logger;
/**
...
* @param arguments The list of arguments
* @since 0.0.1
*/
public static void main(String... arguments) throws IOException {
LOGGER.info("Starting the dyed block state generator...");
Path rootPath = FileSystems.getDefault().getPath(".");
Path templatePath = rootPath.resolve("templates");
Path outputPath = rootPath.resolve("generated");
LOGGER.info("Template path is " + templatePath.toString() + ", output path is " + outputPath.toString());
if (Files.notExists(templatePath)) {
Files.createDirectory(templatePath);
LOGGER.warning("Template folder does not exist; Creating");
}
if (Files.notExists(outputPath)) {
Files.createDirectory(outputPath);
LOGGER.warning("Output folder does not exist; Creating");
}
}
}
...
|
bdf7e1227a970b85c6c523bf762a843ac2b3dbc5
|
crepuscolo-web/src/main/java/net/etalia/crepuscolo/auth/RefreshAuthTokenFilter.java
|
crepuscolo-web/src/main/java/net/etalia/crepuscolo/auth/RefreshAuthTokenFilter.java
|
package net.etalia.crepuscolo.auth;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class RefreshAuthTokenFilter implements Filter {
@Override
public void init(FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
chain.doFilter(request, response);
AuthData auth = AuthFilter.getAuthData();
if (auth.getCurrentToken() != null) {
if (response instanceof HttpServletResponse) {
String newToken = AuthData.produce(auth.getUserId(), auth.getUserPassword(), auth.getSystemId());
((HttpServletResponse) response).setHeader("X-Authorization", newToken);
}
}
}
@Override
public void destroy() {
}
}
|
package net.etalia.crepuscolo.auth;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class RefreshAuthTokenFilter implements Filter {
private long maxTokenTime = -1;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
if (filterConfig.getInitParameter("maxTokenTime") != null) {
maxTokenTime = Long.parseLong(filterConfig.getInitParameter("maxTokenTime"));
}
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
chain.doFilter(request, response);
AuthData auth = AuthFilter.getAuthData();
if (auth.getCurrentToken() != null) {
if (response instanceof HttpServletResponse) {
if (maxTokenTime != -1 && System.currentTimeMillis() < auth.getTimeStamp() + maxTokenTime) {
String newToken = AuthData.produce(auth.getUserId(), auth.getUserPassword(), auth.getSystemId());
((HttpServletResponse) response).setHeader("X-Authorization", newToken);
}
}
}
}
@Override
public void destroy() {
}
}
|
Refresh occurs only if the token is not expired
|
Refresh occurs only if the token is not expired
|
Java
|
apache-2.0
|
madama/crepuscolo
|
java
|
## Code Before:
package net.etalia.crepuscolo.auth;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class RefreshAuthTokenFilter implements Filter {
@Override
public void init(FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
chain.doFilter(request, response);
AuthData auth = AuthFilter.getAuthData();
if (auth.getCurrentToken() != null) {
if (response instanceof HttpServletResponse) {
String newToken = AuthData.produce(auth.getUserId(), auth.getUserPassword(), auth.getSystemId());
((HttpServletResponse) response).setHeader("X-Authorization", newToken);
}
}
}
@Override
public void destroy() {
}
}
## Instruction:
Refresh occurs only if the token is not expired
## Code After:
package net.etalia.crepuscolo.auth;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class RefreshAuthTokenFilter implements Filter {
private long maxTokenTime = -1;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
if (filterConfig.getInitParameter("maxTokenTime") != null) {
maxTokenTime = Long.parseLong(filterConfig.getInitParameter("maxTokenTime"));
}
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
chain.doFilter(request, response);
AuthData auth = AuthFilter.getAuthData();
if (auth.getCurrentToken() != null) {
if (response instanceof HttpServletResponse) {
if (maxTokenTime != -1 && System.currentTimeMillis() < auth.getTimeStamp() + maxTokenTime) {
String newToken = AuthData.produce(auth.getUserId(), auth.getUserPassword(), auth.getSystemId());
((HttpServletResponse) response).setHeader("X-Authorization", newToken);
}
}
}
}
@Override
public void destroy() {
}
}
|
# ... existing code ...
public class RefreshAuthTokenFilter implements Filter {
private long maxTokenTime = -1;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
if (filterConfig.getInitParameter("maxTokenTime") != null) {
maxTokenTime = Long.parseLong(filterConfig.getInitParameter("maxTokenTime"));
}
}
@Override
# ... modified code ...
AuthData auth = AuthFilter.getAuthData();
if (auth.getCurrentToken() != null) {
if (response instanceof HttpServletResponse) {
if (maxTokenTime != -1 && System.currentTimeMillis() < auth.getTimeStamp() + maxTokenTime) {
String newToken = AuthData.produce(auth.getUserId(), auth.getUserPassword(), auth.getSystemId());
((HttpServletResponse) response).setHeader("X-Authorization", newToken);
}
}
}
}
# ... rest of the code ...
|
e5c078e0f278adfbe685df2d8e141a9f71ea7cc8
|
queue/linkedListImplementation/queue.h
|
queue/linkedListImplementation/queue.h
|
/*
* MAW 3.25.a Write the routines to implement queues using: Linked Lists
*
* We use a header node at the very beginning of the linked list.
*
* Front: | header node | -> | data node | -> | data node | :Rear
*/
#ifndef _QUEUE_H
#define _QUEUE_H
typedef int ET;
struct QueueRecord;
struct QueueCDT;
typedef struct QueueRecord* PtrToNode;
typedef struct QueueCDT* QueueADT; // naming convention: https://www.cs.bu.edu/teaching/c/queue/linked-list/types.html
int isEmpty(QueueADT Q);
QueueADT createQueue();
void disposeQueue(QueueADT Q);
void makeEmpty(QueueADT Q);
void enqueue(ET elem, QueueADT Q);
ET front(QueueADT Q);
void dequeue(QueueADT Q);
ET frontAndDequeue(QueueADT Q);
QueueADT initializeQueue(ET array[], int lengthArray);
void printQueue(QueueADT Q);
#endif
|
/*
* MAW 3.25.a Write the routines to implement queues using: Linked Lists
*
* We use a header node at the very beginning of the linked list.
*
* Front: | header node | -> | data node | -> | data node | :Rear
*/
#ifndef _QUEUE_H
#define _QUEUE_H
typedef int ET;
struct QueueRecord;
struct QueueCDT;
typedef struct QueueRecord* PtrToNode;
// CDT: concrete-type-of-a-queue
// ADT: abstract-type-of-a-queue
typedef struct QueueCDT* QueueADT; // naming convention: https://www.cs.bu.edu/teaching/c/queue/linked-list/types.html
int isEmpty(QueueADT Q);
QueueADT createQueue();
void disposeQueue(QueueADT Q);
void makeEmpty(QueueADT Q);
void enqueue(ET elem, QueueADT Q);
ET front(QueueADT Q);
void dequeue(QueueADT Q);
ET frontAndDequeue(QueueADT Q);
QueueADT initializeQueue(ET array[], int lengthArray);
void printQueue(QueueADT Q);
#endif
|
Add comment remarks to CDT & ADT
|
Add comment remarks to CDT & ADT
|
C
|
mit
|
xxks-kkk/algo,xxks-kkk/algo
|
c
|
## Code Before:
/*
* MAW 3.25.a Write the routines to implement queues using: Linked Lists
*
* We use a header node at the very beginning of the linked list.
*
* Front: | header node | -> | data node | -> | data node | :Rear
*/
#ifndef _QUEUE_H
#define _QUEUE_H
typedef int ET;
struct QueueRecord;
struct QueueCDT;
typedef struct QueueRecord* PtrToNode;
typedef struct QueueCDT* QueueADT; // naming convention: https://www.cs.bu.edu/teaching/c/queue/linked-list/types.html
int isEmpty(QueueADT Q);
QueueADT createQueue();
void disposeQueue(QueueADT Q);
void makeEmpty(QueueADT Q);
void enqueue(ET elem, QueueADT Q);
ET front(QueueADT Q);
void dequeue(QueueADT Q);
ET frontAndDequeue(QueueADT Q);
QueueADT initializeQueue(ET array[], int lengthArray);
void printQueue(QueueADT Q);
#endif
## Instruction:
Add comment remarks to CDT & ADT
## Code After:
/*
* MAW 3.25.a Write the routines to implement queues using: Linked Lists
*
* We use a header node at the very beginning of the linked list.
*
* Front: | header node | -> | data node | -> | data node | :Rear
*/
#ifndef _QUEUE_H
#define _QUEUE_H
typedef int ET;
struct QueueRecord;
struct QueueCDT;
typedef struct QueueRecord* PtrToNode;
// CDT: concrete-type-of-a-queue
// ADT: abstract-type-of-a-queue
typedef struct QueueCDT* QueueADT; // naming convention: https://www.cs.bu.edu/teaching/c/queue/linked-list/types.html
int isEmpty(QueueADT Q);
QueueADT createQueue();
void disposeQueue(QueueADT Q);
void makeEmpty(QueueADT Q);
void enqueue(ET elem, QueueADT Q);
ET front(QueueADT Q);
void dequeue(QueueADT Q);
ET frontAndDequeue(QueueADT Q);
QueueADT initializeQueue(ET array[], int lengthArray);
void printQueue(QueueADT Q);
#endif
|
# ... existing code ...
struct QueueRecord;
struct QueueCDT;
typedef struct QueueRecord* PtrToNode;
// CDT: concrete-type-of-a-queue
// ADT: abstract-type-of-a-queue
typedef struct QueueCDT* QueueADT; // naming convention: https://www.cs.bu.edu/teaching/c/queue/linked-list/types.html
int isEmpty(QueueADT Q);
# ... rest of the code ...
|
93f9ac90602fd70b02f262b9e705052270169ee3
|
goplayer-core/src/main/java/org/goplayer/move/StoneMove.java
|
goplayer-core/src/main/java/org/goplayer/move/StoneMove.java
|
package org.goplayer.move;
import org.goplayer.util.Coord;
public class StoneMove implements IMove {
private final Coord coord;
public StoneMove(int row, int col) {
this.coord = new Coord(row, col);
}
public int getRow() {
return coord.getRow();
}
public int getCol() {
return coord.getCol();
}
public Coord getCoord() {
return coord;
}
@Override
public String toString() {
return coord.toString();
}
}
|
package org.goplayer.move;
import org.goplayer.util.Coord;
public class StoneMove implements IMove {
private final Coord coord;
public StoneMove(int row, int col) {
this(new Coord(row, col));
}
public StoneMove(Coord coord) {
this.coord = coord;
}
public int getRow() {
return coord.getRow();
}
public int getCol() {
return coord.getCol();
}
public Coord getCoord() {
return coord;
}
@Override
public String toString() {
return coord.toString();
}
}
|
Replace inner row/col by coord + add a coord-based constructor.
|
Replace inner row/col by coord + add a coord-based constructor.
|
Java
|
bsd-3-clause
|
matthieu-vergne/Go-Player
|
java
|
## Code Before:
package org.goplayer.move;
import org.goplayer.util.Coord;
public class StoneMove implements IMove {
private final Coord coord;
public StoneMove(int row, int col) {
this.coord = new Coord(row, col);
}
public int getRow() {
return coord.getRow();
}
public int getCol() {
return coord.getCol();
}
public Coord getCoord() {
return coord;
}
@Override
public String toString() {
return coord.toString();
}
}
## Instruction:
Replace inner row/col by coord + add a coord-based constructor.
## Code After:
package org.goplayer.move;
import org.goplayer.util.Coord;
public class StoneMove implements IMove {
private final Coord coord;
public StoneMove(int row, int col) {
this(new Coord(row, col));
}
public StoneMove(Coord coord) {
this.coord = coord;
}
public int getRow() {
return coord.getRow();
}
public int getCol() {
return coord.getCol();
}
public Coord getCoord() {
return coord;
}
@Override
public String toString() {
return coord.toString();
}
}
|
...
private final Coord coord;
public StoneMove(int row, int col) {
this(new Coord(row, col));
}
public StoneMove(Coord coord) {
this.coord = coord;
}
public int getRow() {
...
public int getCol() {
return coord.getCol();
}
public Coord getCoord() {
return coord;
}
...
|
8c87a7287031cf18279d72b4dee1dd86c2fa0fb8
|
src/main/java/stream/flarebot/flarebot/util/Pair.java
|
src/main/java/stream/flarebot/flarebot/util/Pair.java
|
package stream.flarebot.flarebot.util;
public class Pair<K, V> {
private K key;
private V value;
public K getKey() {
return this.key;
}
public void setKey(K key) {
this.key = key;
}
public V getValue() {
return this.value;
}
public void setValue(V value) {
this.value = value;
}
public void setKeyValue(K key, V value) {
this.key = key;
this.value = value;
}
}
|
package stream.flarebot.flarebot.util;
public class Pair<K, V> {
private K key;
private V value;
public Pair(K key, V value) {
this.key = key;
this.value = value;
}
public Pair(){}
public K getKey() {
return this.key;
}
public void setKey(K key) {
this.key = key;
}
public V getValue() {
return this.value;
}
public void setValue(V value) {
this.value = value;
}
public void setKeyValue(K key, V value) {
this.key = key;
this.value = value;
}
}
|
Add a god damn constructor
|
Add a god damn constructor
|
Java
|
mit
|
weeryan17/FlareBot,binaryoverload/FlareBot,FlareBot/FlareBot
|
java
|
## Code Before:
package stream.flarebot.flarebot.util;
public class Pair<K, V> {
private K key;
private V value;
public K getKey() {
return this.key;
}
public void setKey(K key) {
this.key = key;
}
public V getValue() {
return this.value;
}
public void setValue(V value) {
this.value = value;
}
public void setKeyValue(K key, V value) {
this.key = key;
this.value = value;
}
}
## Instruction:
Add a god damn constructor
## Code After:
package stream.flarebot.flarebot.util;
public class Pair<K, V> {
private K key;
private V value;
public Pair(K key, V value) {
this.key = key;
this.value = value;
}
public Pair(){}
public K getKey() {
return this.key;
}
public void setKey(K key) {
this.key = key;
}
public V getValue() {
return this.value;
}
public void setValue(V value) {
this.value = value;
}
public void setKeyValue(K key, V value) {
this.key = key;
this.value = value;
}
}
|
# ... existing code ...
private K key;
private V value;
public Pair(K key, V value) {
this.key = key;
this.value = value;
}
public Pair(){}
public K getKey() {
return this.key;
# ... rest of the code ...
|
b65c5157c9e4515b01558201b983727d3a3154bd
|
src/syntax/relative_clauses.py
|
src/syntax/relative_clauses.py
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node)
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node)
|
Fix detection of relative clause
|
Fix detection of relative clause
|
Python
|
mit
|
Somsubhra/Simplify,Somsubhra/Simplify,Somsubhra/Simplify
|
python
|
## Code Before:
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_wh_word |= tree.label() == "WHNP"
for node in tree:
self.parse_tree(node)
## Instruction:
Fix detection of relative clause
## Code After:
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The Relative clauses class
class RelativeClauses:
# Constructor for the Relative Clauses class
def __init__(self):
self.has_wh_word = False
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_wh_word = False
self.parse_tree(t)
print "Relative Clause: " + str(self.has_wh_word)
result_string = ""
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node)
|
...
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
if tree.label() == "SBAR":
for node in tree:
if type(node) == Tree:
self.has_wh_word |= node.label() == "WHNP"
for node in tree:
self.parse_tree(node)
...
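A quick way to sanity-check the SBAR-scoped WHNP test introduced above is to run it against a hand-written bracketed parse. The sketch below is illustrative only (the helper name has_relative_clause and the parse string are invented, not part of the project); it assumes nltk is installed.
from nltk.tree import Tree
def has_relative_clause(parse_str):
    # True when some SBAR node directly dominates a WHNP child,
    # mirroring the check added to parse_tree above.
    tree = Tree.fromstring(parse_str)
    for subtree in tree.subtrees(lambda t: t.label() == "SBAR"):
        if any(isinstance(child, Tree) and child.label() == "WHNP" for child in subtree):
            return True
    return False
# Hypothetical parse of "the man who left":
print(has_relative_clause("(NP (NP (DT the) (NN man)) (SBAR (WHNP (WP who)) (S (VP (VBD left)))))"))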
|
f29477416729df9cc198f679a2478f6a077ce365
|
app/util.py
|
app/util.py
|
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
return wrapper
|
import inspect
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper
|
Make cached_function not overwrite signature of wrapped function
|
Make cached_function not overwrite signature of wrapped function
|
Python
|
mit
|
albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com
|
python
|
## Code Before:
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
return wrapper
## Instruction:
Make cached_function not overwrite signature of wrapped function
## Code After:
import inspect
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper
|
...
import inspect
import os
from typing import Any, Callable
...
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper
...
|
639824dfa86b2aa98b1ae2ca3d4a5cec6ca329ea
|
nbgrader/preprocessors/__init__.py
|
nbgrader/preprocessors/__init__.py
|
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .findstudentid import FindStudentID
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
|
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
|
Remove FindStudentID from preprocessors init
|
Remove FindStudentID from preprocessors init
|
Python
|
bsd-3-clause
|
EdwardJKim/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,alope107/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jdfreder/nbgrader,jdfreder/nbgrader,ellisonbg/nbgrader,alope107/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,dementrock/nbgrader
|
python
|
## Code Before:
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .findstudentid import FindStudentID
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
## Instruction:
Remove FindStudentID from preprocessors init
## Code After:
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
from .savecells import SaveCells
from .overwritecells import OverwriteCells
from .checkcellmetadata import CheckCellMetadata
from .execute import Execute
from .getgrades import GetGrades
|
# ... existing code ...
from .headerfooter import IncludeHeaderFooter
from .lockcells import LockCells
from .clearsolutions import ClearSolutions
from .saveautogrades import SaveAutoGrades
from .displayautogrades import DisplayAutoGrades
from .computechecksums import ComputeChecksums
# ... rest of the code ...
|
2a086200b7644c3b3b869359c23366e7a3f36141
|
show_usbcamera.py
|
show_usbcamera.py
|
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
|
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
Add OpenCV viewer for debugging.
|
Add OpenCV viewer for debugging.
|
Python
|
mit
|
microy/StereoVision,microy/StereoVision,microy/VisionToolkit,microy/VisionToolkit,microy/PyStereoVisionToolkit,microy/PyStereoVisionToolkit
|
python
|
## Code Before:
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
## Instruction:
Add OpenCV viewer for debugging.
## Code After:
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
# Main application
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
|
// ... existing code ...
import cv2
#from PySide import QtGui
import VisionToolkit as vt
#
# Image callback function
#
def Callback( image ) :
# Display the stereo image
cv2.imshow( 'Camera', image )
cv2.waitKey( 1 )
#
// ... modified code ...
#
if __name__ == '__main__' :
# application = QtGui.QApplication( sys.argv )
# widget = vt.UsbCameraWidget()
# widget.show()
# sys.exit( application.exec_() )
# Initialize the stereo cameras
usbcamera = vt.UsbCamera( Callback )
# Lower the camera frame rate and resolution
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480 )
usbcamera.camera.set( cv2.cv.CV_CAP_PROP_FPS, 25 )
# Start capture
usbcamera.start()
# Wait for user key press
raw_input( 'Press <enter> to stop the capture...' )
# Stop image acquisition
usbcamera.running = False
usbcamera.join()
# Cleanup OpenCV
cv2.destroyAllWindows()
// ... rest of the code ...
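One caveat worth flagging: the cv2.cv.CV_CAP_PROP_* constants used above are the OpenCV 2.x names, and raw_input is Python 2. Under OpenCV 3 and later the cv2.cv submodule is gone; an equivalent property setup would look roughly like the sketch below (illustrative only, device index 0 assumed for the example):
import cv2
camera = cv2.VideoCapture(0)
camera.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
camera.set(cv2.CAP_PROP_FPS, 25)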
|
5cadb19aec67945efa5c4367ee4d1aab6bbc66e7
|
git-issues/git-handler.py
|
git-issues/git-handler.py
|
import os
import re
import subprocess
import requests
GITHUB_API_ADDRESS = "https://api.github.com/"
def get_git_address():
response = subprocess.check_output(['git', 'remote', '-v'])
dirty = response.split('\n')
repos = {}
for repo in dirty:
rep = repo.split('\t')
if len(rep) > 1:
repos[rep[0]] = rep[1].replace(' (fetch)', '').replace(' (push)', '')
return repos
def get_issues(repos):
issues = []
import re
for k, v in repos.items():
repo_slug_match = re.search("\:(.*\/.*)\.git", v)
if repo_slug_match is not None:
repo_slug = repo_slug_match.group(1)
response = requests.get(GITHUB_API_ADDRESS + "repos/" +repo_slug + "/issues")
issues += response.json()
git_issues_dir = os.path.expanduser("~/.git-issues/")
if not os.path.exists(git_issues_dir):
os.makedirs(git_issues_dir)
with open(git_issues_dir + "%s.json" % re.search("\:.*\/(.*)\.git", repos['origin']).group(1), 'w') as f:
import json
f.write(json.dumps(issues))
if __name__ == '__main__':
repos = get_git_address()
get_issues(repos)
|
import os
import re
import subprocess
import requests
GITHUB_API_ADDRESS = "https://api.github.com/"
def get_git_address():
response = subprocess.check_output(['git', 'remote', '-v'])
dirty = response.split('\n')
repos = {}
for repo in dirty:
rep = repo.split('\t')
if len(rep) > 1:
repos[rep[0]] = rep[1].replace(' (fetch)', '').replace(' (push)', '')
return repos
def get_issues(repos):
issues = []
import re
for k, v in repos.items():
repo_slug_match = re.search("\:(.*\/.*)\.git", v)
if repo_slug_match is not None:
repo_slug = repo_slug_match.group(1)
response = requests.get(GITHUB_API_ADDRESS + "repos/" +repo_slug + "/issues")
issues += response.json()
write_issues_to_disk(issues)
def write_issues_to_disk(issues):
git_issues_dir = os.path.expanduser("~/.git-issues/")
if not os.path.exists(git_issues_dir):
os.makedirs(git_issues_dir)
with open(git_issues_dir + "%s.json" % re.search("\:.*\/(.*)\.git", repos['origin']).group(1), 'w') as f:
import json
f.write(json.dumps(issues))
if __name__ == '__main__':
repos = get_git_address()
get_issues(repos)
|
Split out writing to disk into its own function
|
Split out writing to disk into its own function
|
Python
|
apache-2.0
|
AutomatedTester/git-issues,AutomatedTester/git-issues,AutomatedTester/git-issues
|
python
|
## Code Before:
import os
import re
import subprocess
import requests
GITHUB_API_ADDRESS = "https://api.github.com/"
def get_git_address():
response = subprocess.check_output(['git', 'remote', '-v'])
dirty = response.split('\n')
repos = {}
for repo in dirty:
rep = repo.split('\t')
if len(rep) > 1:
repos[rep[0]] = rep[1].replace(' (fetch)', '').replace(' (push)', '')
return repos
def get_issues(repos):
issues = []
import re
for k, v in repos.items():
repo_slug_match = re.search("\:(.*\/.*)\.git", v)
if repo_slug_match is not None:
repo_slug = repo_slug_match.group(1)
response = requests.get(GITHUB_API_ADDRESS + "repos/" +repo_slug + "/issues")
issues += response.json()
git_issues_dir = os.path.expanduser("~/.git-issues/")
if not os.path.exists(git_issues_dir):
os.makedirs(git_issues_dir)
with open(git_issues_dir + "%s.json" % re.search("\:.*\/(.*)\.git", repos['origin']).group(1), 'w') as f:
import json
f.write(json.dumps(issues))
if __name__ == '__main__':
repos = get_git_address()
get_issues(repos)
## Instruction:
Split out writing to disk into its own function
## Code After:
import os
import re
import subprocess
import requests
GITHUB_API_ADDRESS = "https://api.github.com/"
def get_git_address():
response = subprocess.check_output(['git', 'remote', '-v'])
dirty = response.split('\n')
repos = {}
for repo in dirty:
rep = repo.split('\t')
if len(rep) > 1:
repos[rep[0]] = rep[1].replace(' (fetch)', '').replace(' (push)', '')
return repos
def get_issues(repos):
issues = []
import re
for k, v in repos.items():
repo_slug_match = re.search("\:(.*\/.*)\.git", v)
if repo_slug_match is not None:
repo_slug = repo_slug_match.group(1)
response = requests.get(GITHUB_API_ADDRESS + "repos/" +repo_slug + "/issues")
issues += response.json()
write_issues_to_disk(issues)
def write_issues_to_disk(issues):
git_issues_dir = os.path.expanduser("~/.git-issues/")
if not os.path.exists(git_issues_dir):
os.makedirs(git_issues_dir)
with open(git_issues_dir + "%s.json" % re.search("\:.*\/(.*)\.git", repos['origin']).group(1), 'w') as f:
import json
f.write(json.dumps(issues))
if __name__ == '__main__':
repos = get_git_address()
get_issues(repos)
|
...
repo_slug = repo_slug_match.group(1)
response = requests.get(GITHUB_API_ADDRESS + "repos/" +repo_slug + "/issues")
issues += response.json()
write_issues_to_disk(issues)
def write_issues_to_disk(issues):
git_issues_dir = os.path.expanduser("~/.git-issues/")
if not os.path.exists(git_issues_dir):
os.makedirs(git_issues_dir)
...
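Note that the extracted write_issues_to_disk still reads repos from the surrounding scope to build the file name. A fully self-contained variant might pass the repository name explicitly, roughly as sketched below (illustrative only; the repo_name parameter is invented and is not the project's API):
import json
import os
def write_issues_to_disk(issues, repo_name):
    git_issues_dir = os.path.expanduser("~/.git-issues/")
    if not os.path.exists(git_issues_dir):
        os.makedirs(git_issues_dir)
    with open(os.path.join(git_issues_dir, "%s.json" % repo_name), 'w') as f:
        f.write(json.dumps(issues))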
|
948088dde543747f8876cd4cf752fbb94b0b7797
|
src/main/java/fr/vidal/oss/jaxb/atom/core/LinkRel.java
|
src/main/java/fr/vidal/oss/jaxb/atom/core/LinkRel.java
|
package fr.vidal.oss.jaxb.atom.core;
public enum LinkRel {
alternate, source, related, self, inline;
}
|
package fr.vidal.oss.jaxb.atom.core;
public enum LinkRel {
alternate, source, related, self, inline, first, last, previous, next;
}
|
Add RFC5005 link relations for feed paging
|
Add RFC5005 link relations for feed paging
for further information you can refer yourself to this RFC :
https://tools.ietf.org/html/rfc5005 and IANA relations listing
http://www.iana.org/assignments/link-relations/link-relations.xhtml
|
Java
|
mit
|
vidal-community/atom-jaxb,softwarevidal/atom-jaxb
|
java
|
## Code Before:
package fr.vidal.oss.jaxb.atom.core;
public enum LinkRel {
alternate, source, related, self, inline;
}
## Instruction:
Add RFC5005 link relations for feed paging
for further information you can refer yourself to this RFC :
https://tools.ietf.org/html/rfc5005 and IANA relations listing
http://www.iana.org/assignments/link-relations/link-relations.xhtml
## Code After:
package fr.vidal.oss.jaxb.atom.core;
public enum LinkRel {
alternate, source, related, self, inline, first, last, previous, next;
}
|
# ... existing code ...
package fr.vidal.oss.jaxb.atom.core;
public enum LinkRel {
alternate, source, related, self, inline, first, last, previous, next;
}
# ... rest of the code ...
|
fc7cadecb95fa798a8e8aaeb544ad5464f13a533
|
nanomon/registry.py
|
nanomon/registry.py
|
from weakref import WeakValueDictionary
class DuplicateEntryError(Exception):
def __init__(self, name, obj, registry):
self.name = name
self.obj = obj
self.registry = registry
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._registry_name, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
#super(Registry, self).__init__(*args, **kwargs)
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
if not isinstance(value, self._object_type):
raise TypeError("This registry only accepts objects of type %s." %
(self._object_type.__name__))
if self.has_key(name):
raise DuplicateEntryError(name, value, self)
WeakValueDictionary.__setitem__(self, name, value)
|
from weakref import WeakValueDictionary
class DuplicateEntryError(Exception):
def __init__(self, name, obj, registry):
self.name = name
self.obj = obj
self.registry = registry
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._object_type.__name__, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
if not isinstance(value, self._object_type):
raise TypeError("This registry only accepts objects of type %s." %
(self._object_type.__name__))
if self.has_key(name):
raise DuplicateEntryError(name, value, self)
WeakValueDictionary.__setitem__(self, name, value)
|
Clean up some commented out code
|
Clean up some commented out code
|
Python
|
bsd-2-clause
|
cloudtools/nymms
|
python
|
## Code Before:
from weakref import WeakValueDictionary
class DuplicateEntryError(Exception):
def __init__(self, name, obj, registry):
self.name = name
self.obj = obj
self.registry = registry
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._registry_name, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
#super(Registry, self).__init__(*args, **kwargs)
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
if not isinstance(value, self._object_type):
raise TypeError("This registry only accepts objects of type %s." %
(self._object_type.__name__))
if self.has_key(name):
raise DuplicateEntryError(name, value, self)
WeakValueDictionary.__setitem__(self, name, value)
## Instruction:
Clean up some commented out code
## Code After:
from weakref import WeakValueDictionary
class DuplicateEntryError(Exception):
def __init__(self, name, obj, registry):
self.name = name
self.obj = obj
self.registry = registry
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._object_type.__name__, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
if not isinstance(value, self._object_type):
raise TypeError("This registry only accepts objects of type %s." %
(self._object_type.__name__))
if self.has_key(name):
raise DuplicateEntryError(name, value, self)
WeakValueDictionary.__setitem__(self, name, value)
|
...
def __str__(self):
return "Duplicate entry in '%s' registry for '%s'." % (
self.registry._object_type.__name__, self.name)
class Registry(WeakValueDictionary):
def __init__(self, object_type, *args, **kwargs):
self._object_type = object_type
WeakValueDictionary.__init__(self, *args, **kwargs)
def __setitem__(self, name, value):
...
|
a6bd1cfc5f87d6f9a7ac846665fcab5b02c33c1d
|
tubular/scripts/hipchat/submit_hipchat_msg.py
|
tubular/scripts/hipchat/submit_hipchat_msg.py
|
import os
import sys
import requests
import click
HIPCHAT_API_URL = "http://api.hipchat.com"
NOTIFICATION_POST = "/v2/room/{}/notification"
AUTH_HEADER = "Authorization: Bearer {}"
@click.command()
@click.option('--auth_token_env_var', '-a',
help="Environment variable containing authentication token to use for HipChat REST API.",
)
@click.option('--channel', '-c',
default="release pipeline",
help="Channel to which the script should post a message.",
)
def cli(auth_token_env_var, channel):
"""
Post a message to a HipChat channel.
"""
msg = "Test message from the demo GoCD release pipeline."
headers = {
"Authorization": "Bearer {}".format(os.environ[auth_token_env_var])
}
msg_payload = {
"color": "green",
"message": msg,
"notify": False,
"message_format": "text"
}
post_url = HIPCHAT_API_URL + NOTIFICATION_POST.format(channel)
r = requests.post(post_url, headers=headers, json=msg_payload)
# An exit code of 0 means success and non-zero means failure.
success = r.status_code in (200, 201, 204)
sys.exit(not success)
if __name__ == '__main__':
cli()
|
import os
import sys
import requests
import click
HIPCHAT_API_URL = "http://api.hipchat.com"
NOTIFICATION_POST = "/v2/room/{}/notification"
AUTH_HEADER = "Authorization: Bearer {}"
@click.command()
@click.option('--auth_token_env_var', '-a',
help="Environment variable containing authentication token to use for HipChat REST API.",
)
@click.option('--channel', '-c',
default="release pipeline",
help="Channel to which the script should post a message.",
)
@click.option('--message', '-m',
default="Default message.",
help="Message to send to HipChat channel.",
)
def cli(auth_token_env_var, channel, message):
"""
Post a message to a HipChat channel.
"""
headers = {
"Authorization": "Bearer {}".format(os.environ[auth_token_env_var])
}
msg_payload = {
"color": "green",
"message": message,
"notify": False,
"message_format": "text"
}
post_url = HIPCHAT_API_URL + NOTIFICATION_POST.format(channel)
r = requests.post(post_url, headers=headers, json=msg_payload)
# An exit code of 0 means success and non-zero means failure.
success = r.status_code in (200, 201, 204)
sys.exit(not success)
if __name__ == '__main__':
cli()
|
Add ability to set HipChat message contents.
|
Add ability to set HipChat message contents.
|
Python
|
agpl-3.0
|
eltoncarr/tubular,eltoncarr/tubular
|
python
|
## Code Before:
import os
import sys
import requests
import click
HIPCHAT_API_URL = "http://api.hipchat.com"
NOTIFICATION_POST = "/v2/room/{}/notification"
AUTH_HEADER = "Authorization: Bearer {}"
@click.command()
@click.option('--auth_token_env_var', '-a',
help="Environment variable containing authentication token to use for HipChat REST API.",
)
@click.option('--channel', '-c',
default="release pipeline",
help="Channel to which the script should post a message.",
)
def cli(auth_token_env_var, channel):
"""
Post a message to a HipChat channel.
"""
msg = "Test message from the demo GoCD release pipeline."
headers = {
"Authorization": "Bearer {}".format(os.environ[auth_token_env_var])
}
msg_payload = {
"color": "green",
"message": msg,
"notify": False,
"message_format": "text"
}
post_url = HIPCHAT_API_URL + NOTIFICATION_POST.format(channel)
r = requests.post(post_url, headers=headers, json=msg_payload)
# An exit code of 0 means success and non-zero means failure.
success = r.status_code in (200, 201, 204)
sys.exit(not success)
if __name__ == '__main__':
cli()
## Instruction:
Add ability to set HipChat message contents.
## Code After:
import os
import sys
import requests
import click
HIPCHAT_API_URL = "http://api.hipchat.com"
NOTIFICATION_POST = "/v2/room/{}/notification"
AUTH_HEADER = "Authorization: Bearer {}"
@click.command()
@click.option('--auth_token_env_var', '-a',
help="Environment variable containing authentication token to use for HipChat REST API.",
)
@click.option('--channel', '-c',
default="release pipeline",
help="Channel to which the script should post a message.",
)
@click.option('--message', '-m',
default="Default message.",
help="Message to send to HipChat channel.",
)
def cli(auth_token_env_var, channel, message):
"""
Post a message to a HipChat channel.
"""
headers = {
"Authorization": "Bearer {}".format(os.environ[auth_token_env_var])
}
msg_payload = {
"color": "green",
"message": message,
"notify": False,
"message_format": "text"
}
post_url = HIPCHAT_API_URL + NOTIFICATION_POST.format(channel)
r = requests.post(post_url, headers=headers, json=msg_payload)
# An exit code of 0 means success and non-zero means failure.
success = r.status_code in (200, 201, 204)
sys.exit(not success)
if __name__ == '__main__':
cli()
|
// ... existing code ...
default="release pipeline",
help="Channel to which the script should post a message.",
)
@click.option('--message', '-m',
default="Default message.",
help="Message to send to HipChat channel.",
)
def cli(auth_token_env_var, channel, message):
"""
Post a message to a HipChat channel.
"""
headers = {
"Authorization": "Bearer {}".format(os.environ[auth_token_env_var])
}
msg_payload = {
"color": "green",
"message": message,
"notify": False,
"message_format": "text"
}
// ... rest of the code ...
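A hypothetical way to exercise the new --message option locally is click's test runner; the import path, token name, and argument values below are assumptions for illustration, not taken from the repository:
import os
from click.testing import CliRunner
from submit_hipchat_msg import cli  # assumed local import path
os.environ['HIPCHAT_TOKEN'] = 'dummy-token'  # placeholder; a dummy token means the API call will fail
runner = CliRunner()
result = runner.invoke(cli, ['-a', 'HIPCHAT_TOKEN', '-c', 'release pipeline', '-m', 'Build 42 deployed'])
print(result.exit_code)  # non-zero is expected here because the placeholder token is rejected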
|
8e5617d8c0279c871c0d78bc3ad5d3676d35cbce
|
setup.py
|
setup.py
|
from setuptools import setup
filename = 'tweetfeels/version.py'
exec(compile(open(filename, "rb").read(), filename, 'exec'))
setup(name='tweetfeels',
version=__version__,
description='Real-time sentiment analysis for twitter.',
author='Thomas Chen',
author_email='[email protected]',
url='https://github.com/uclatommy/tweetfeels',
download_url='https://github.com/uclatommy/tweetfeels/tarball/{}'.format(
__version__
),
packages=['tweetfeels'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence'
],
install_requires=[
'tweepy', 'h5py', 'nltk', 'numpy', 'oauthlib', 'pandas',
'python-dateutil', 'pytz', 'requests', 'requests-oauthlib',
'six', 'twython'
],
test_suite='nose.collector',
tests_require=['nose']
)
|
from setuptools import setup
import os
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = 'Real-time sentiment analysis for twitter.'
filename = 'tweetfeels/version.py'
exec(compile(open(filename, "rb").read(), filename, 'exec'))
setup(name='tweetfeels',
version=__version__,
description='Real-time sentiment analysis for twitter.',
long_description=long_description,
author='Thomas Chen',
author_email='[email protected]',
url='https://github.com/uclatommy/tweetfeels',
download_url='https://github.com/uclatommy/tweetfeels/tarball/{}'.format(
__version__
),
packages=['tweetfeels'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence'
],
install_requires=[
'tweepy', 'h5py', 'nltk', 'numpy', 'oauthlib', 'pandas',
'python-dateutil', 'pytz', 'requests', 'requests-oauthlib',
'six', 'twython'
],
test_suite='nose.collector',
tests_require=['nose']
)
|
Use pandoc to convert the README to rst and supply that as long_description to PyPI.
|
Use pandoc to convert the README to rst and supply that as long_description to PyPI.
|
Python
|
bsd-3-clause
|
uclatommy/tweetfeels
|
python
|
## Code Before:
from setuptools import setup
filename = 'tweetfeels/version.py'
exec(compile(open(filename, "rb").read(), filename, 'exec'))
setup(name='tweetfeels',
version=__version__,
description='Real-time sentiment analysis for twitter.',
author='Thomas Chen',
author_email='[email protected]',
url='https://github.com/uclatommy/tweetfeels',
download_url='https://github.com/uclatommy/tweetfeels/tarball/{}'.format(
__version__
),
packages=['tweetfeels'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence'
],
install_requires=[
'tweepy', 'h5py', 'nltk', 'numpy', 'oauthlib', 'pandas',
'python-dateutil', 'pytz', 'requests', 'requests-oauthlib',
'six', 'twython'
],
test_suite='nose.collector',
tests_require=['nose']
)
## Instruction:
Use pandoc to convert the README to rst and supply that as long_description to PyPI.
## Code After:
from setuptools import setup
import os
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = 'Real-time sentiment analysis for twitter.'
filename = 'tweetfeels/version.py'
exec(compile(open(filename, "rb").read(), filename, 'exec'))
setup(name='tweetfeels',
version=__version__,
description='Real-time sentiment analysis for twitter.',
long_description=long_description,
author='Thomas Chen',
author_email='[email protected]',
url='https://github.com/uclatommy/tweetfeels',
download_url='https://github.com/uclatommy/tweetfeels/tarball/{}'.format(
__version__
),
packages=['tweetfeels'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Artificial Intelligence'
],
install_requires=[
'tweepy', 'h5py', 'nltk', 'numpy', 'oauthlib', 'pandas',
'python-dateutil', 'pytz', 'requests', 'requests-oauthlib',
'six', 'twython'
],
test_suite='nose.collector',
tests_require=['nose']
)
|
# ... existing code ...
from setuptools import setup
import os
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = 'Real-time sentiment analysis for twitter.'
filename = 'tweetfeels/version.py'
exec(compile(open(filename, "rb").read(), filename, 'exec'))
# ... modified code ...
setup(name='tweetfeels',
version=__version__,
description='Real-time sentiment analysis for twitter.',
long_description=long_description,
author='Thomas Chen',
author_email='[email protected]',
url='https://github.com/uclatommy/tweetfeels',
# ... rest of the code ...
|
ff35b4353fbb47c602d3561c5e6e84201355df14
|
Cryptor.py
|
Cryptor.py
|
from Crypto.Cipher import AES
class Cryptor(object):
def __init__(self, key, iv):
#self.aes = AES.new(key, mode=AES.MODE_CBC, IV=iv) # This resembles stuff from shairtunes
self.aes = AES.new(key, mode=AES.MODE_ECB, IV=iv) # I found this in airtunesd
self.inbuf = ""
self.outbuf = ""
self.lastLen = 0
def decrypt(self, data):
self.inbuf += data
blocksEnd = len(self.inbuf)
blocksEnd -= blocksEnd % AES.block_size
self.outbuf += self.aes.decrypt(self.inbuf[:blocksEnd])
self.inbuf = self.inbuf[blocksEnd:]
res = self.outbuf[:self.lastLen]
self.outbuf = self.outbuf[self.lastLen:]
self.lastLen = len(data)
return res
class EchoCryptor(object):
def decrypt(self, data):
return data
|
from Crypto.Cipher import AES
import Crypto.Util.Counter
class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
class EchoCryptor(object):
def decrypt(self, data):
return data
|
Use CTR as encryption mode. Works with iOS 6.
|
Use CTR as encryption mode. Works with iOS 6.
|
Python
|
bsd-2-clause
|
tzwenn/PyOpenAirMirror,tzwenn/PyOpenAirMirror
|
python
|
## Code Before:
from Crypto.Cipher import AES
class Cryptor(object):
def __init__(self, key, iv):
#self.aes = AES.new(key, mode=AES.MODE_CBC, IV=iv) # This resembles stuff from shairtunes
self.aes = AES.new(key, mode=AES.MODE_ECB, IV=iv) # I found this in airtunesd
self.inbuf = ""
self.outbuf = ""
self.lastLen = 0
def decrypt(self, data):
self.inbuf += data
blocksEnd = len(self.inbuf)
blocksEnd -= blocksEnd % AES.block_size
self.outbuf += self.aes.decrypt(self.inbuf[:blocksEnd])
self.inbuf = self.inbuf[blocksEnd:]
res = self.outbuf[:self.lastLen]
self.outbuf = self.outbuf[self.lastLen:]
self.lastLen = len(data)
return res
class EchoCryptor(object):
def decrypt(self, data):
return data
## Instruction:
Use CTR as encryption mode. Works with iOS 6.
## Code After:
from Crypto.Cipher import AES
import Crypto.Util.Counter
class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
class EchoCryptor(object):
def decrypt(self, data):
return data
|
// ... existing code ...
from Crypto.Cipher import AES
import Crypto.Util.Counter
class Cryptor(AES.AESCipher):
def __init__(self, key, iv):
self.counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
AES.AESCipher.__init__(self, key, mode=AES.MODE_CTR, counter=self.counter)
class EchoCryptor(object):
// ... rest of the code ...
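For reference, the same CTR construction written against a plain AES.new call. This is a sketch under the legacy PyCrypto / Python 2 assumptions the record already makes (it relies on long() and str-based keys); the key, IV, and ciphertext bytes are placeholders, not real session material:
from Crypto.Cipher import AES
import Crypto.Util.Counter
key = '0' * 16                                   # placeholder 16-byte AES key
iv = '\x00' * 15 + '\x01'                        # placeholder 16-byte IV seeding the counter
counter = Crypto.Util.Counter.new(128, initial_value=long(iv.encode("hex"), 16))
aes = AES.new(key, mode=AES.MODE_CTR, counter=counter)
ciphertext = '\x00' * 32                         # stand-in for encrypted audio data
plaintext = aes.decrypt(ciphertext)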
|
518df76dcc14895f4555451194f64a98ccc814ef
|
pymco/utils.py
|
pymco/utils.py
|
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to get the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to get the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
Use importlib.import_module instead of __import__
|
Use importlib.import_module instead of __import__
|
Python
|
bsd-3-clause
|
rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective
|
python
|
## Code Before:
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to get the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = __import__(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
## Instruction:
Use importlib.import_module instead of __import__
## Code After:
import importlib
def import_class(import_path):
"""Import a class based on given dotted import path string.
It just splits the import path in order to get the module and class names,
then it just calls to :py:func:`__import__` with the module name and
:py:func:`getattr` with the module and the class name.
Params:
``import_path``: A dotted import path string.
Returns:
``class``: The class once imported.
Raises:
:py:exc:`ImportError`
"""
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
klass=klass_str,
mod=mod_str,
))
def import_object(import_path, *args, **kwargs):
"""Import a class and instantiate it.
Uses :py:func:`import_class` in order to import the given class by its
import path and instantiate it using given positional and keyword
arguments.
Params:
``import_path``: Same argument as :py:func:`import_class`.
``args``: Positional arguments for object instantiation.
``kwargs``: Keyword arguments for object instantiation.
"""
return import_class(import_path)(*args, **kwargs)
|
// ... existing code ...
import importlib
def import_class(import_path):
// ... modified code ...
parts = import_path.split('.')
mod_str, klass_str = '.'.join(parts[:-1]), parts[-1]
try:
mod = importlib.import_module(mod_str)
return getattr(mod, klass_str)
except (AttributeError, ValueError):
raise ImportError('Unable to import {klass} from module {mod}'.format(
// ... rest of the code ...
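A brief usage sketch for the two helpers above (illustrative; collections.OrderedDict is just a convenient stdlib target, and the import assumes the pymco/utils.py module path given in this record):
from pymco import utils
OrderedDict = utils.import_class('collections.OrderedDict')
od = utils.import_object('collections.OrderedDict', [('a', 1)])
print(OrderedDict, od)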
|
ad1b7cb8dda0dc2565aab6cd8c6a392753682875
|
wapiti/helpers.py
|
wapiti/helpers.py
|
from collections import namedtuple
from decorator import decorator
from functools import wraps
from django.db.models import get_apps
from piston.utils import rc
from wapiti.conf import ID_RE
_RegisteredType = namedtuple('RegisteredType', ('api', ))
_registered_types = {}
def register(name, modelapi):
"""Register a model with the API"""
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not modelapi.objects:
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
def _api_method(f, *args, **kwargs):
return f(*args, **kwargs)
def api_method(f):
"""Decorator to declare a method api-accessible"""
f.api = True
return decorator(_api_method, f)
def _is_id(id):
return ID_RE.match(id)
def _register_models():
"""Find app api submodules and register models"""
for a in get_apps():
try:
_temp = __import__('.'.join(a.__name__.split('.')[:-1] + ['api']),
globals(), locals())
except ImportError:
pass
|
from collections import namedtuple
from decorator import decorator
from functools import wraps
from django.db.models import get_apps
from piston.utils import rc
from wapiti.conf import ID_RE
_RegisteredType = namedtuple('RegisteredType', ('api', ))
_registered_types = {}
def register(name, modelapi):
"""Register a model with the API"""
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not hasattr(modelapi, 'objects'):
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
def _api_method(f, *args, **kwargs):
return f(*args, **kwargs)
def api_method(f):
"""Decorator to declare a method api-accessible"""
f.api = True
return decorator(_api_method, f)
def _is_id(id):
return ID_RE.match(id)
def _register_models():
"""Find app api submodules and register models"""
for a in get_apps():
try:
_temp = __import__('.'.join(a.__name__.split('.')[:-1] + ['api']),
globals(), locals())
except ImportError:
pass
|
Check for the existence of model.objects without calling it; may fix some weird buggy behaviour involving database migrations.
|
Check for the existence of model.objects without calling it; may fix some weird buggy behaviour involving database migrations.
|
Python
|
bsd-3-clause
|
ecometrica/django-wapiti
|
python
|
## Code Before:
from collections import namedtuple
from decorator import decorator
from functools import wraps
from django.db.models import get_apps
from piston.utils import rc
from wapiti.conf import ID_RE
_RegisteredType = namedtuple('RegisteredType', ('api', ))
_registered_types = {}
def register(name, modelapi):
"""Register a model with the API"""
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not modelapi.objects:
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
def _api_method(f, *args, **kwargs):
return f(*args, **kwargs)
def api_method(f):
"""Decorator to declare a method api-accessible"""
f.api = True
return decorator(_api_method, f)
def _is_id(id):
return ID_RE.match(id)
def _register_models():
"""Find app api submodules and register models"""
for a in get_apps():
try:
_temp = __import__('.'.join(a.__name__.split('.')[:-1] + ['api']),
globals(), locals())
except ImportError:
pass
## Instruction:
Check for the existence of model.objects without calling it; may fix some weird buggy behaviour involving database migrations.
## Code After:
from collections import namedtuple
from decorator import decorator
from functools import wraps
from django.db.models import get_apps
from piston.utils import rc
from wapiti.conf import ID_RE
_RegisteredType = namedtuple('RegisteredType', ('api', ))
_registered_types = {}
def register(name, modelapi):
"""Register a model with the API"""
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not hasattr(modelapi, 'objects'):
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
def _api_method(f, *args, **kwargs):
return f(*args, **kwargs)
def api_method(f):
"""Decorator to declare a method api-accessible"""
f.api = True
return decorator(_api_method, f)
def _is_id(id):
return ID_RE.match(id)
def _register_models():
"""Find app api submodules and register models"""
for a in get_apps():
try:
_temp = __import__('.'.join(a.__name__.split('.')[:-1] + ['api']),
globals(), locals())
except ImportError:
pass
|
# ... existing code ...
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not hasattr(modelapi, 'objects'):
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
# ... rest of the code ...
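A toy illustration of why hasattr is the safer test here (class name invented, not project code): truth-testing a missing attribute raises AttributeError, while hasattr simply returns False and lets the fallback assignment run.
class ModelApiWithoutObjects(object):
    model = None  # stand-in for the Django model the real API class would reference
print(hasattr(ModelApiWithoutObjects, 'objects'))   # False, no exception raised
try:
    if not ModelApiWithoutObjects.objects:
        pass
except AttributeError as exc:
    print('direct access fails:', exc)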
|
b6de7b132a0733737658b7ba5d1262f825ad9ca4
|
src/test/java/com/clxcommunications/xms/ApiErrorTest.java
|
src/test/java/com/clxcommunications/xms/ApiErrorTest.java
|
package com.clxcommunications.xms;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.junit.Test;
public class ApiErrorTest {
private final ApiObjectMapper json = new ApiObjectMapper();
@Test
public void canSerializeJson() throws Exception {
String code = RandomStringUtils.randomPrint(1, 20);
String text = RandomStringUtils.randomPrint(1, 20);
ApiError input = ImmutableApiError.of(code, text);
String expected = Utils.join("\n",
"{",
" \"code\" : \"" + StringEscapeUtils.escapeJson(code) + "\",",
" \"text\" : \"" + StringEscapeUtils.escapeJson(text) + "\"",
"}");
String actual = json.writeValueAsString(input);
assertThat(actual, is(TestUtils.jsonEqualTo(expected)));
}
@Test
public void canDeserializeJson() throws Exception {
String code = RandomStringUtils.randomPrint(1, 20);
String text = RandomStringUtils.randomPrint(1, 20);
ApiError expected = ImmutableApiError.of(code, text);
String input = json.writeValueAsString(expected);
ApiError actual = json.readValue(input, ApiError.class);
assertThat(actual, is(expected));
}
}
|
package com.clxcommunications.xms;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.apache.commons.lang3.StringEscapeUtils;
import org.junit.runner.RunWith;
import com.pholser.junit.quickcheck.Property;
import com.pholser.junit.quickcheck.runner.JUnitQuickcheck;
@RunWith(JUnitQuickcheck.class)
public class ApiErrorTest {
private final ApiObjectMapper json = new ApiObjectMapper();
@Property
public void canSerializeJson(String code, String text) throws Exception {
ApiError input = ImmutableApiError.of(code, text);
String expected = Utils.join("\n",
"{",
" \"code\" : \"" + StringEscapeUtils.escapeJson(code) + "\",",
" \"text\" : \"" + StringEscapeUtils.escapeJson(text) + "\"",
"}");
String actual = json.writeValueAsString(input);
assertThat(actual, is(TestUtils.jsonEqualTo(expected)));
}
@Property
public void canDeserializeJson(String code, String text) throws Exception {
ApiError expected = ImmutableApiError.of(code, text);
String input = json.writeValueAsString(expected);
ApiError actual = json.readValue(input, ApiError.class);
assertThat(actual, is(expected));
}
}
|
Use JUnit QuickCheck in ApiError test
|
Use JUnit QuickCheck in ApiError test
|
Java
|
apache-2.0
|
clxcommunications/sdk-xms-java
|
java
|
## Code Before:
package com.clxcommunications.xms;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.junit.Test;
public class ApiErrorTest {
private final ApiObjectMapper json = new ApiObjectMapper();
@Test
public void canSerializeJson() throws Exception {
String code = RandomStringUtils.randomPrint(1, 20);
String text = RandomStringUtils.randomPrint(1, 20);
ApiError input = ImmutableApiError.of(code, text);
String expected = Utils.join("\n",
"{",
" \"code\" : \"" + StringEscapeUtils.escapeJson(code) + "\",",
" \"text\" : \"" + StringEscapeUtils.escapeJson(text) + "\"",
"}");
String actual = json.writeValueAsString(input);
assertThat(actual, is(TestUtils.jsonEqualTo(expected)));
}
@Test
public void canDeserializeJson() throws Exception {
String code = RandomStringUtils.randomPrint(1, 20);
String text = RandomStringUtils.randomPrint(1, 20);
ApiError expected = ImmutableApiError.of(code, text);
String input = json.writeValueAsString(expected);
ApiError actual = json.readValue(input, ApiError.class);
assertThat(actual, is(expected));
}
}
## Instruction:
Use JUnit QuickCheck in ApiError test
## Code After:
package com.clxcommunications.xms;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.apache.commons.lang3.StringEscapeUtils;
import org.junit.runner.RunWith;
import com.pholser.junit.quickcheck.Property;
import com.pholser.junit.quickcheck.runner.JUnitQuickcheck;
@RunWith(JUnitQuickcheck.class)
public class ApiErrorTest {
private final ApiObjectMapper json = new ApiObjectMapper();
@Property
public void canSerializeJson(String code, String text) throws Exception {
ApiError input = ImmutableApiError.of(code, text);
String expected = Utils.join("\n",
"{",
" \"code\" : \"" + StringEscapeUtils.escapeJson(code) + "\",",
" \"text\" : \"" + StringEscapeUtils.escapeJson(text) + "\"",
"}");
String actual = json.writeValueAsString(input);
assertThat(actual, is(TestUtils.jsonEqualTo(expected)));
}
@Property
public void canDeserializeJson(String code, String text) throws Exception {
ApiError expected = ImmutableApiError.of(code, text);
String input = json.writeValueAsString(expected);
ApiError actual = json.readValue(input, ApiError.class);
assertThat(actual, is(expected));
}
}
|
# ... existing code ...
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.apache.commons.lang3.StringEscapeUtils;
import org.junit.runner.RunWith;
import com.pholser.junit.quickcheck.Property;
import com.pholser.junit.quickcheck.runner.JUnitQuickcheck;
@RunWith(JUnitQuickcheck.class)
public class ApiErrorTest {
private final ApiObjectMapper json = new ApiObjectMapper();
@Property
public void canSerializeJson(String code, String text) throws Exception {
ApiError input = ImmutableApiError.of(code, text);
String expected = Utils.join("\n",
# ... modified code ...
assertThat(actual, is(TestUtils.jsonEqualTo(expected)));
}
@Property
public void canDeserializeJson(String code, String text) throws Exception {
ApiError expected = ImmutableApiError.of(code, text);
String input = json.writeValueAsString(expected);
# ... rest of the code ...
|
cd68d5bf444385334841a5ce07058cddb314ff82
|
lobster/cmssw/data/merge_cfg.py
|
lobster/cmssw/data/merge_cfg.py
|
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('chirp', default=None, mytype=VarParsing.varType.string)
options.register('inputs', mult=VarParsing.multiplicity.list, mytype=VarParsing.varType.string)
options.register('output', mytype=VarParsing.varType.string)
options.parseArguments()
if options.chirp:
for input in options.inputs:
status = subprocess.call([os.path.join(os.environ.get("PARROT_PATH", "bin"), "chirp_get"),
options.chirp,
input,
os.path.basename(input)])
if status != 0:
sys.exit(500)
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('chirp', default=None, mytype=VarParsing.varType.string)
options.register('inputs', mult=VarParsing.multiplicity.list, mytype=VarParsing.varType.string)
options.register('output', mytype=VarParsing.varType.string)
options.parseArguments()
if options.chirp:
for input in options.inputs:
status = subprocess.call([os.path.join(os.environ.get("PARROT_PATH", "bin"), "chirp_get"),
options.chirp,
input,
os.path.basename(input)])
if status != 0:
sys.exit(500)
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
Disable duplicate check mode for merging; the duplicate check can cause events to be thrown out for MC data.
|
Disable duplicate check mode for merging; the duplicate check can cause events to be thrown out for MC data.
|
Python
|
mit
|
matz-e/lobster,matz-e/lobster,matz-e/lobster
|
python
|
## Code Before:
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('chirp', default=None, mytype=VarParsing.varType.string)
options.register('inputs', mult=VarParsing.multiplicity.list, mytype=VarParsing.varType.string)
options.register('output', mytype=VarParsing.varType.string)
options.parseArguments()
if options.chirp:
for input in options.inputs:
status = subprocess.call([os.path.join(os.environ.get("PARROT_PATH", "bin"), "chirp_get"),
options.chirp,
input,
os.path.basename(input)])
if status != 0:
sys.exit(500)
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
## Instruction:
Disable duplicate check mode for merging; the duplicate check can cause events to be thrown out for MC data.
## Code After:
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
import subprocess
import os
import sys
options = VarParsing('analysis')
options.register('chirp', default=None, mytype=VarParsing.varType.string)
options.register('inputs', mult=VarParsing.multiplicity.list, mytype=VarParsing.varType.string)
options.register('output', mytype=VarParsing.varType.string)
options.parseArguments()
if options.chirp:
for input in options.inputs:
status = subprocess.call([os.path.join(os.environ.get("PARROT_PATH", "bin"), "chirp_get"),
options.chirp,
input,
os.path.basename(input)])
if status != 0:
sys.exit(500)
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string(options.output)
)
process.end = cms.EndPath(process.out)
|
# ... existing code ...
process = cms.Process("PickEvent")
process.source = cms.Source ("PoolSource",
fileNames = cms.untracked.vstring(''),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck')
)
process.out = cms.OutputModule("PoolOutputModule",
# ... rest of the code ...
|
6281da3b846bfea26ea68e3fe480c738a5181506
|
runtests.py
|
runtests.py
|
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
Add test-runner option to run zpop* tests.
|
Add test-runner option to run zpop* tests.
|
Python
|
mit
|
coleifer/walrus
|
python
|
## Code Before:
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
## Instruction:
Add test-runner option to run zpop* tests.
## Code After:
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
...
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
...
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
...
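A note on ordering in the change above: `from walrus import tests` is deliberately deferred until after TEST_ZPOP is written into os.environ, because the tests module inspects the environment while it is being imported. A minimal sketch of the same pattern, assuming a hypothetical package name (`mypackage` is illustrative, not part of walrus):

import optparse
import os

parser = optparse.OptionParser()
parser.add_option('-x', '--extra', action='store_true',
                  help='Run optional extra tests.')
options, args = parser.parse_args()

if options.extra:
    # Must be set before the import below; the imported test module
    # reads this variable at import time to decide what to define.
    os.environ['TEST_EXTRA'] = '1'

from mypackage import tests  # hypothetical package; import deferred on purpose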
|
45c773c0d2c90a57949a758ab3ac5c15e2942528
|
resource_mgt.py
|
resource_mgt.py
|
"""Class to show file manipulations"""
import sys
original_file = open('wasteland.txt', mode='rt', encoding='utf-8')
file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8')
file_to_write.write("What are the roots that clutch, ")
file_to_write.write('what branches grow\n')
file_to_write.close()
file_reading = open('wasteland.txt', mode='rt', encoding='utf-8')
for line in file_reading.readlines():
print(line)
file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8')
file_to_append.writelines(
['Son of man,\n',
'You cannot say, or guess, ',
'for you know only,\n',
'A heap of broken images, ',
'where the sun beats\n'])
file_to_append.close()
|
"""Class to show file manipulations"""
import sys
original_file = open('wasteland.txt', mode='rt', encoding='utf-8')
file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8')
file_to_write.write("What are the roots that clutch, ")
file_to_write.write('what branches grow\n')
file_to_write.close()
file_reading = open('wasteland.txt', mode='rt', encoding='utf-8')
for line in file_reading.readlines():
print(line)
file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8')
file_to_append.writelines(
['Son of man,\n',
'You cannot say, or guess, ',
'for you know only,\n',
'A heap of broken images, ',
'where the sun beats\n'])
file_to_append.close()
def words_per_line(flo):
return [len(line.split()) for line in flo.readlines()]
|
Add a words per line function
|
Add a words per line function
|
Python
|
mit
|
kentoj/python-fundamentals
|
python
|
## Code Before:
"""Class to show file manipulations"""
import sys
original_file = open('wasteland.txt', mode='rt', encoding='utf-8')
file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8')
file_to_write.write("What are the roots that clutch, ")
file_to_write.write('what branches grow\n')
file_to_write.close()
file_reading = open('wasteland.txt', mode='rt', encoding='utf-8')
for line in file_reading.readlines():
print(line)
file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8')
file_to_append.writelines(
['Son of man,\n',
'You cannot say, or guess, ',
'for you know only,\n',
'A heap of broken images, ',
'where the sun beats\n'])
file_to_append.close()
## Instruction:
Add a words per line function
## Code After:
"""Class to show file manipulations"""
import sys
original_file = open('wasteland.txt', mode='rt', encoding='utf-8')
file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8')
file_to_write.write("What are the roots that clutch, ")
file_to_write.write('what branches grow\n')
file_to_write.close()
file_reading = open('wasteland.txt', mode='rt', encoding='utf-8')
for line in file_reading.readlines():
print(line)
file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8')
file_to_append.writelines(
['Son of man,\n',
'You cannot say, or guess, ',
'for you know only,\n',
'A heap of broken images, ',
'where the sun beats\n'])
file_to_append.close()
def words_per_line(flo):
return [len(line.split()) for line in flo.readlines()]
|
...
"""Class to show file manipulations"""
import sys
original_file = open('wasteland.txt', mode='rt', encoding='utf-8')
file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8')
...
for line in file_reading.readlines():
print(line)
file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8')
file_to_append.writelines(
['Son of man,\n',
...
'A heap of broken images, ',
'where the sun beats\n'])
file_to_append.close()
def words_per_line(flo):
return [len(line.split()) for line in flo.readlines()]
...
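A quick usage sketch for the words_per_line helper added above, using an in-memory file object so it runs without wasteland.txt on disk (the sample text here is made up):

import io

def words_per_line(flo):
    # Same helper as in the record: word count per line of a file-like object.
    return [len(line.split()) for line in flo.readlines()]

sample = io.StringIO("What are the roots that clutch\nwhat branches grow\n")
print(words_per_line(sample))  # [6, 3]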
|
3cb25e903ad0fd342509d32dca2d3c507f001b5a
|
devilry/devilry_autoset_empty_email_by_username/models.py
|
devilry/devilry_autoset_empty_email_by_username/models.py
|
from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
|
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
Comment out the post_save connect line.
|
devilry_autoset_empty_email_by_username: Comment out the post_save connect line.
|
Python
|
bsd-3-clause
|
devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django
|
python
|
## Code Before:
from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
## Instruction:
devilry_autoset_empty_email_by_username: Comment out the post_save connect line.
## Code After:
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
// ... existing code ...
from django.conf import settings
// ... modified code ...
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
// ... rest of the code ...
|
5bf441e34b672a5a369ad7e42cdc2fc7f7699476
|
publishers/base_publisher.py
|
publishers/base_publisher.py
|
from shared.base_component import BaseComponent
class BasePublisher(BaseComponent):
def __init__(self, conf):
BaseComponent.__init__(self, conf)
def publish(self, message):
pass
def __call__(self, message):
if self.query.match(message):
message = self.project.transform(message)
self.publish(message)
def close(self):
pass
|
from shared.base_component import BaseComponent
class BasePublisher(BaseComponent):
def __init__(self, conf):
BaseComponent.__init__(self, conf)
def publish(self, message):
pass
def __call__(self, message):
if self.query.match(message):
message = self.project.transform(message)
if message is not None:
self.publish(message)
def close(self):
pass
|
Discard None values in projections in publishers
|
Discard None values in projections in publishers
|
Python
|
mit
|
weapp/miner
|
python
|
## Code Before:
from shared.base_component import BaseComponent
class BasePublisher(BaseComponent):
def __init__(self, conf):
BaseComponent.__init__(self, conf)
def publish(self, message):
pass
def __call__(self, message):
if self.query.match(message):
message = self.project.transform(message)
self.publish(message)
def close(self):
pass
## Instruction:
Discard None values in projections in publishers
## Code After:
from shared.base_component import BaseComponent
class BasePublisher(BaseComponent):
def __init__(self, conf):
BaseComponent.__init__(self, conf)
def publish(self, message):
pass
def __call__(self, message):
if self.query.match(message):
message = self.project.transform(message)
if message is not None:
self.publish(message)
def close(self):
pass
|
// ... existing code ...
def __call__(self, message):
if self.query.match(message):
message = self.project.transform(message)
if message is not None:
self.publish(message)
def close(self):
pass
// ... rest of the code ...
|
313a81093527c88631713f6b4ad8c652554edb50
|
l10n_br_base/migrations/12.0.1.0.0/post-migration.py
|
l10n_br_base/migrations/12.0.1.0.0/post-migration.py
|
from openupgradelib import openupgrade
@openupgrade.migrate()
def migrate(env, version):
cr = env.cr
cr.execute(
'''INSERT INTO res_city(id, name, country_id, state_id, ibge_code)
SELECT nextval('res_city_id_seq'), name, (SELECT id FROM res_country
WHERE code='BR'), state_id, ibge_code FROM l10n_br_base_city
WHERE ibge_code NOT IN (SELECT ibge_code FROM res_city);
''')
cr.execute(
'''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
state_id FROM other_inscricoes_estaduais;
''')
cr.execute(
'''UPDATE res_partner rp SET city_id=(
SELECT id FROM res_city WHERE ibge_code=(
SELECT ibge_code FROM l10n_br_base_city WHERE id=rp.l10n_br_city_id))
''')
|
from openupgradelib import openupgrade
_model_renames = [
('l10n_br_base.city', 'res.city'),
]
_table_renames = [
('l10n_br_base_city', 'res_city'),
]
@openupgrade.migrate()
def migrate(env, version):
cr = env.cr
openupgrade.rename_models(cr, _model_renames)
openupgrade.rename_tables(cr, _table_renames)
cr.execute(
'''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
state_id FROM other_inscricoes_estaduais;
''')
cr.execute(
'''UPDATE res_partner rp SET city_id=(
SELECT id FROM res_city WHERE ibge_code=(
SELECT ibge_code FROM l10n_br_base_city WHERE id=rp.l10n_br_city_id))
''')
|
Rename table _model_renames and _table_renames
|
[ADD] Rename table _model_renames and _table_renames
Signed-off-by: Luis Felipe Mileo <[email protected]>
|
Python
|
agpl-3.0
|
akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil
|
python
|
## Code Before:
from openupgradelib import openupgrade
@openupgrade.migrate()
def migrate(env, version):
cr = env.cr
cr.execute(
'''INSERT INTO res_city(id, name, country_id, state_id, ibge_code)
SELECT nextval('res_city_id_seq'), name, (SELECT id FROM res_country
WHERE code='BR'), state_id, ibge_code FROM l10n_br_base_city
WHERE ibge_code NOT IN (SELECT ibge_code FROM res_city);
''')
cr.execute(
'''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
state_id FROM other_inscricoes_estaduais;
''')
cr.execute(
'''UPDATE res_partner rp SET city_id=(
SELECT id FROM res_city WHERE ibge_code=(
SELECT ibge_code FROM l10n_br_base_city WHERE id=rp.l10n_br_city_id))
''')
## Instruction:
[ADD] Rename table _model_renames and _table_renames
Signed-off-by: Luis Felipe Mileo <[email protected]>
## Code After:
from openupgradelib import openupgrade
_model_renames = [
('l10n_br_base.city', 'res.city'),
]
_table_renames = [
('l10n_br_base_city', 'res_city'),
]
@openupgrade.migrate()
def migrate(env, version):
cr = env.cr
openupgrade.rename_models(cr, _model_renames)
openupgrade.rename_tables(cr, _table_renames)
cr.execute(
'''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
state_id FROM other_inscricoes_estaduais;
''')
cr.execute(
'''UPDATE res_partner rp SET city_id=(
SELECT id FROM res_city WHERE ibge_code=(
SELECT ibge_code FROM l10n_br_base_city WHERE id=rp.l10n_br_city_id))
''')
|
...
from openupgradelib import openupgrade
_model_renames = [
('l10n_br_base.city', 'res.city'),
]
_table_renames = [
('l10n_br_base_city', 'res_city'),
]
@openupgrade.migrate()
def migrate(env, version):
cr = env.cr
openupgrade.rename_models(cr, _model_renames)
openupgrade.rename_tables(cr, _table_renames)
cr.execute(
'''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
state_id FROM other_inscricoes_estaduais;
''')
cr.execute(
...
|
aff77b144c1a1895c9e8c0ca2d4e79451525901c
|
terminus/models/trunk.py
|
terminus/models/trunk.py
|
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
|
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
|
Make Trunks have opposite directions in the included lanes
|
Make Trunks have opposite directions in the included lanes
|
Python
|
apache-2.0
|
ekumenlabs/terminus,ekumenlabs/terminus
|
python
|
## Code Before:
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
## Instruction:
Make Trunks have opposite directions in the included lanes
## Code After:
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
|
// ... existing code ...
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
// ... rest of the code ...
|
8546d9bfe3e18b48424e4c42d3eaa51713f0df49
|
src/codechicken/lib/asm/RenderHookTransformer.java
|
src/codechicken/lib/asm/RenderHookTransformer.java
|
package codechicken.lib.asm;
import codechicken.lib.asm.ModularASMTransformer.MethodInjector;
import net.minecraft.launchwrapper.IClassTransformer;
import java.util.Map;
public class RenderHookTransformer implements IClassTransformer
{
private ModularASMTransformer transformer = new ModularASMTransformer();
public RenderHookTransformer() {
Map<String, ASMBlock> blocks = ASMReader.loadResource("/assets/ccl/asm/hooks.asm");
transformer.add(new MethodInjector(new ObfMapping("net/minecraft/client/renderer/entity/RenderItem",
"renderItem", "(Lnet/minecraft/item/ItemStack;Lnet/minecraft/client/resources/model/IBakedModel;)V"),
blocks.get("n_IItemRenderer"), blocks.get("IItemRenderer"), true));
}
@Override
public byte[] transform(String name, String tname, byte[] bytes) {
return transformer.transform(name, bytes);
}
}
|
package codechicken.lib.asm;
import codechicken.lib.asm.ModularASMTransformer.MethodInjector;
import net.minecraft.launchwrapper.IClassTransformer;
import java.util.Map;
public class RenderHookTransformer implements IClassTransformer
{
private ModularASMTransformer transformer = new ModularASMTransformer();
public RenderHookTransformer() {
Map<String, ASMBlock> blocks = ASMReader.loadResource("/assets/ccl/asm/hooks.asm");
transformer.add(new MethodInjector(new ObfMapping("net/minecraft/client/renderer/entity/RenderItem",
"func_180454_a", "(Lnet/minecraft/item/ItemStack;Lnet/minecraft/client/resources/model/IBakedModel;)V"),
blocks.get("n_IItemRenderer"), blocks.get("IItemRenderer"), true));
}
@Override
public byte[] transform(String name, String tname, byte[] bytes) {
return transformer.transform(name, bytes);
}
}
|
Fix srg mapping in hook
|
Fix srg mapping in hook
|
Java
|
lgpl-2.1
|
Chicken-Bones/CodeChickenLib,TheCBProject/CodeChickenLib,alexbegt/CodeChickenLib
|
java
|
## Code Before:
package codechicken.lib.asm;
import codechicken.lib.asm.ModularASMTransformer.MethodInjector;
import net.minecraft.launchwrapper.IClassTransformer;
import java.util.Map;
public class RenderHookTransformer implements IClassTransformer
{
private ModularASMTransformer transformer = new ModularASMTransformer();
public RenderHookTransformer() {
Map<String, ASMBlock> blocks = ASMReader.loadResource("/assets/ccl/asm/hooks.asm");
transformer.add(new MethodInjector(new ObfMapping("net/minecraft/client/renderer/entity/RenderItem",
"renderItem", "(Lnet/minecraft/item/ItemStack;Lnet/minecraft/client/resources/model/IBakedModel;)V"),
blocks.get("n_IItemRenderer"), blocks.get("IItemRenderer"), true));
}
@Override
public byte[] transform(String name, String tname, byte[] bytes) {
return transformer.transform(name, bytes);
}
}
## Instruction:
Fix srg mapping in hook
## Code After:
package codechicken.lib.asm;
import codechicken.lib.asm.ModularASMTransformer.MethodInjector;
import net.minecraft.launchwrapper.IClassTransformer;
import java.util.Map;
public class RenderHookTransformer implements IClassTransformer
{
private ModularASMTransformer transformer = new ModularASMTransformer();
public RenderHookTransformer() {
Map<String, ASMBlock> blocks = ASMReader.loadResource("/assets/ccl/asm/hooks.asm");
transformer.add(new MethodInjector(new ObfMapping("net/minecraft/client/renderer/entity/RenderItem",
"func_180454_a", "(Lnet/minecraft/item/ItemStack;Lnet/minecraft/client/resources/model/IBakedModel;)V"),
blocks.get("n_IItemRenderer"), blocks.get("IItemRenderer"), true));
}
@Override
public byte[] transform(String name, String tname, byte[] bytes) {
return transformer.transform(name, bytes);
}
}
|
// ... existing code ...
public RenderHookTransformer() {
Map<String, ASMBlock> blocks = ASMReader.loadResource("/assets/ccl/asm/hooks.asm");
transformer.add(new MethodInjector(new ObfMapping("net/minecraft/client/renderer/entity/RenderItem",
"func_180454_a", "(Lnet/minecraft/item/ItemStack;Lnet/minecraft/client/resources/model/IBakedModel;)V"),
blocks.get("n_IItemRenderer"), blocks.get("IItemRenderer"), true));
}
// ... rest of the code ...
|
45275a48fb434e6a9d895da03e290b84c52694f6
|
orbitdeterminator/kep_determination/least_squares.py
|
orbitdeterminator/kep_determination/least_squares.py
|
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
|
import math
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
#rotation about the z-axis about an angle `ang`
def rotz(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
#rotation about the x-axis about an angle `ang`
def rotx(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
#rotation from the orbital plane to the inertial frame
#it is composed of the following rotations, in that order:
#1) rotation about the z axis about an angle `omega` (argument of pericenter)
#2) rotation about the x axis about an angle `I` (inclination)
#3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
def op2if(omega,I,Omega):
P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
return np.matmul(rotz(Omega),P2_mul_P3)
omega = math.radians(31.124)
I = math.radians(75.0)
Omega = math.radians(60.0)
# rotation matrix from orbital plane to inertial frame
# two ways to compute it; result should be the same
P_1 = rotz(omega) #rotation about z axis by an angle `omega`
P_2 = rotx(I) #rotation about x axis by an angle `I`
P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
Rot2 = op2if(omega,I,Omega)
v = np.array((3.0,-2.0,1.0))
print(I)
print(omega)
print(Omega)
print(Rot1)
print(np.matmul(Rot1,v))
print(Rot2)
|
Add rotation matrix, from orbital plane to inertial frame
|
Add rotation matrix, from orbital plane to inertial frame
|
Python
|
mit
|
aerospaceresearch/orbitdeterminator
|
python
|
## Code Before:
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
## Instruction:
Add rotation matrix, from orbital plane to inertial frame
## Code After:
import math
import numpy as np
import matplotlib.pyplot as plt
# convention:
# a: semi-major axis
# e: eccentricity
# eps: mean longitude at epoch
# Euler angles:
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
#rotation about the z-axis about an angle `ang`
def rotz(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
#rotation about the x-axis about an angle `ang`
def rotx(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
#rotation from the orbital plane to the inertial frame
#it is composed of the following rotations, in that order:
#1) rotation about the z axis about an angle `omega` (argument of pericenter)
#2) rotation about the x axis about an angle `I` (inclination)
#3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
def op2if(omega,I,Omega):
P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
return np.matmul(rotz(Omega),P2_mul_P3)
omega = math.radians(31.124)
I = math.radians(75.0)
Omega = math.radians(60.0)
# rotation matrix from orbital plane to inertial frame
# two ways to compute it; result should be the same
P_1 = rotz(omega) #rotation about z axis by an angle `omega`
P_2 = rotx(I) #rotation about x axis by an angle `I`
P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
Rot2 = op2if(omega,I,Omega)
v = np.array((3.0,-2.0,1.0))
print(I)
print(omega)
print(Omega)
print(Rot1)
print(np.matmul(Rot1,v))
print(Rot2)
|
// ... existing code ...
import math
import numpy as np
import matplotlib.pyplot as plt
// ... modified code ...
# I: inclination
# Omega: longitude of ascending node
# omega: argument of pericenter
#rotation about the z-axis about an angle `ang`
def rotz(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((cos_ang,-sin_ang,0.0), (sin_ang, cos_ang,0.0), (0.0,0.0,1.0)))
#rotation about the x-axis about an angle `ang`
def rotx(ang):
cos_ang = math.cos(ang)
sin_ang = math.sin(ang)
return np.array(((1.0,0.0,0.0), (0.0,cos_ang,-sin_ang), (0.0,sin_ang,cos_ang)))
#rotation from the orbital plane to the inertial frame
#it is composed of the following rotations, in that order:
#1) rotation about the z axis about an angle `omega` (argument of pericenter)
#2) rotation about the x axis about an angle `I` (inclination)
#3) rotation about the z axis about an angle `Omega` (longitude of ascending node)
def op2if(omega,I,Omega):
P2_mul_P3 = np.matmul(rotx(I),rotz(omega))
return np.matmul(rotz(Omega),P2_mul_P3)
omega = math.radians(31.124)
I = math.radians(75.0)
Omega = math.radians(60.0)
# rotation matrix from orbital plane to inertial frame
# two ways to compute it; result should be the same
P_1 = rotz(omega) #rotation about z axis by an angle `omega`
P_2 = rotx(I) #rotation about x axis by an angle `I`
P_3 = rotz(Omega) #rotation about z axis by an angle `Omega`
Rot1 = np.matmul(P_3,np.matmul(P_2,P_1))
Rot2 = op2if(omega,I,Omega)
v = np.array((3.0,-2.0,1.0))
print(I)
print(omega)
print(Omega)
print(Rot1)
print(np.matmul(Rot1,v))
print(Rot2)
// ... rest of the code ...
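A worked form of the composition implemented by op2if above, with omega the argument of pericenter, I the inclination and Omega the longitude of the ascending node (these are the standard elementary rotations matching rotz and rotx in the record, written here for reference):

R(\Omega, I, \omega) = R_z(\Omega)\, R_x(I)\, R_z(\omega),
\qquad
R_z(\theta) = \begin{pmatrix} \cos\theta & -\sin\theta & 0 \\ \sin\theta & \cos\theta & 0 \\ 0 & 0 & 1 \end{pmatrix},
\quad
R_x(\theta) = \begin{pmatrix} 1 & 0 & 0 \\ 0 & \cos\theta & -\sin\theta \\ 0 & \sin\theta & \cos\theta \end{pmatrix}

Rot1 and Rot2 in the record are two numerical evaluations of this same product and should agree up to rounding error.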
|
e01eb66aeb853261c80cb476e71f91a9569b1676
|
client.py
|
client.py
|
import requests
from Adafruit_BMP085 import BMP085
import json
#initialise sensor
print ('Initialising sensor...')
bmp = BMP085(0x77, 3) # ULTRAHIRES Mode
print ('Reading sensor...')
temp = bmp.readTemperature()
pressure = bmp.readPressure()
payload = {'temperature': temp, 'pressure': pressure}
print ('POSTing data...')
server_url = 'http://zephos.duckdns.org:5000/temperature_pressure'
r = requests.post(server_url, data=json.dumps(payload))
print(r.status_code)
|
import requests
from Adafruit_BMP085 import BMP085
import json
#initialise sensor
print ('Initialising sensor...')
bmp = BMP085(0x77, 3) # ULTRAHIRES Mode
print ('Reading sensor...')
temp = bmp.readTemperature()
pressure = bmp.readPressure()
payload = {'temperature': temp, 'pressure': pressure}
print ('POSTing data...')
server_url = 'http://zephos.duckdns.org:5000/temperature_pressure'
headers = {'content-type': 'application/json'}
r = requests.post(server_url, data=json.dumps(payload), headers=headers)
print(r.status_code)
|
Set content-type header of POST.
|
Set content-type header of POST.
|
Python
|
mit
|
JTKBowers/kelvin
|
python
|
## Code Before:
import requests
from Adafruit_BMP085 import BMP085
import json
#initialise sensor
print ('Initialising sensor...')
bmp = BMP085(0x77, 3) # ULTRAHIRES Mode
print ('Reading sensor...')
temp = bmp.readTemperature()
pressure = bmp.readPressure()
payload = {'temperature': temp, 'pressure': pressure}
print ('POSTing data...')
server_url = 'http://zephos.duckdns.org:5000/temperature_pressure'
r = requests.post(server_url, data=json.dumps(payload))
print(r.status_code)
## Instruction:
Set content-type header of POST.
## Code After:
import requests
from Adafruit_BMP085 import BMP085
import json
#initialise sensor
print ('Initialising sensor...')
bmp = BMP085(0x77, 3) # ULTRAHIRES Mode
print ('Reading sensor...')
temp = bmp.readTemperature()
pressure = bmp.readPressure()
payload = {'temperature': temp, 'pressure': pressure}
print ('POSTing data...')
server_url = 'http://zephos.duckdns.org:5000/temperature_pressure'
headers = {'content-type': 'application/json'}
r = requests.post(server_url, data=json.dumps(payload), headers=headers)
print(r.status_code)
|
// ... existing code ...
print ('POSTing data...')
server_url = 'http://zephos.duckdns.org:5000/temperature_pressure'
headers = {'content-type': 'application/json'}
r = requests.post(server_url, data=json.dumps(payload), headers=headers)
print(r.status_code)
// ... rest of the code ...
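For reference, the same effect can be had with the json keyword of requests.post, which serialises the payload and sets the content-type header in one step (available in reasonably recent requests releases; whether to prefer it over an explicit headers dict is a style choice):

import requests

payload = {'temperature': 21.5, 'pressure': 101325}  # example values only
r = requests.post('http://zephos.duckdns.org:5000/temperature_pressure',
                  json=payload)  # sets content-type: application/json automatically
print(r.status_code)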
|
e14f51a4248fe3c4f031a011b483947bfb2f2f5d
|
libtock/tock.c
|
libtock/tock.c
|
void yield_for(bool *cond) {
while(!*cond) {
yield();
}
}
void yield() {
asm volatile("push {lr}\nsvc 0\npop {pc}" ::: "memory", "r0");
}
int subscribe(uint32_t driver, uint32_t subscribe,
subscribe_cb cb, void* userdata) {
asm volatile("svc 1\nbx lr" ::: "memory", "r0");
}
int command(uint32_t driver, uint32_t command, int data) {
asm volatile("svc 2\nbx lr" ::: "memory", "r0");
}
int allow(uint32_t driver, uint32_t allow, void* ptr, size_t size) {
asm volatile("svc 3\nbx lr" ::: "memory", "r0");
}
int memop(uint32_t op_type, int arg1) {
asm volatile("svc 4\nbx lr" ::: "memory", "r0");
}
bool driver_exists(uint32_t driver) {
int ret = command(driver, 0, 0);
return ret >= 0;
}
|
void yield_for(bool *cond) {
while(!*cond) {
yield();
}
}
void yield() {
asm volatile("push {lr}\nsvc 0\npop {pc}" ::: "memory", "r0");
}
int subscribe(uint32_t driver, uint32_t subscribe,
subscribe_cb cb, void* userdata) {
register int ret __asm__ ("r0");
asm volatile("svc 1" ::: "memory", "r0");
return ret;
}
int command(uint32_t driver, uint32_t command, int data) {
register int ret __asm__ ("r0");
asm volatile("svc 2\nbx lr" ::: "memory", "r0");
return ret;
}
int allow(uint32_t driver, uint32_t allow, void* ptr, size_t size) {
register int ret __asm__ ("r0");
asm volatile("svc 3\nbx lr" ::: "memory", "r0");
return ret;
}
int memop(uint32_t op_type, int arg1) {
register int ret __asm__ ("r0");
asm volatile("svc 4\nbx lr" ::: "memory", "r0");
return ret;
}
bool driver_exists(uint32_t driver) {
int ret = command(driver, 0, 0);
return ret >= 0;
}
|
Resolve 'control reaches end of non-void function'
|
Resolve 'control reaches end of non-void function'
Following http://stackoverflow.com/questions/15927583/
|
C
|
apache-2.0
|
tock/libtock-c,tock/libtock-c,tock/libtock-c
|
c
|
## Code Before:
void yield_for(bool *cond) {
while(!*cond) {
yield();
}
}
void yield() {
asm volatile("push {lr}\nsvc 0\npop {pc}" ::: "memory", "r0");
}
int subscribe(uint32_t driver, uint32_t subscribe,
subscribe_cb cb, void* userdata) {
asm volatile("svc 1\nbx lr" ::: "memory", "r0");
}
int command(uint32_t driver, uint32_t command, int data) {
asm volatile("svc 2\nbx lr" ::: "memory", "r0");
}
int allow(uint32_t driver, uint32_t allow, void* ptr, size_t size) {
asm volatile("svc 3\nbx lr" ::: "memory", "r0");
}
int memop(uint32_t op_type, int arg1) {
asm volatile("svc 4\nbx lr" ::: "memory", "r0");
}
bool driver_exists(uint32_t driver) {
int ret = command(driver, 0, 0);
return ret >= 0;
}
## Instruction:
Resolve 'control reaches end of non-void function'
Following http://stackoverflow.com/questions/15927583/
## Code After:
void yield_for(bool *cond) {
while(!*cond) {
yield();
}
}
void yield() {
asm volatile("push {lr}\nsvc 0\npop {pc}" ::: "memory", "r0");
}
int subscribe(uint32_t driver, uint32_t subscribe,
subscribe_cb cb, void* userdata) {
register int ret __asm__ ("r0");
asm volatile("svc 1" ::: "memory", "r0");
return ret;
}
int command(uint32_t driver, uint32_t command, int data) {
register int ret __asm__ ("r0");
asm volatile("svc 2\nbx lr" ::: "memory", "r0");
return ret;
}
int allow(uint32_t driver, uint32_t allow, void* ptr, size_t size) {
register int ret __asm__ ("r0");
asm volatile("svc 3\nbx lr" ::: "memory", "r0");
return ret;
}
int memop(uint32_t op_type, int arg1) {
register int ret __asm__ ("r0");
asm volatile("svc 4\nbx lr" ::: "memory", "r0");
return ret;
}
bool driver_exists(uint32_t driver) {
int ret = command(driver, 0, 0);
return ret >= 0;
}
|
# ... existing code ...
int subscribe(uint32_t driver, uint32_t subscribe,
subscribe_cb cb, void* userdata) {
register int ret __asm__ ("r0");
asm volatile("svc 1" ::: "memory", "r0");
return ret;
}
int command(uint32_t driver, uint32_t command, int data) {
register int ret __asm__ ("r0");
asm volatile("svc 2\nbx lr" ::: "memory", "r0");
return ret;
}
int allow(uint32_t driver, uint32_t allow, void* ptr, size_t size) {
register int ret __asm__ ("r0");
asm volatile("svc 3\nbx lr" ::: "memory", "r0");
return ret;
}
int memop(uint32_t op_type, int arg1) {
register int ret __asm__ ("r0");
asm volatile("svc 4\nbx lr" ::: "memory", "r0");
return ret;
}
bool driver_exists(uint32_t driver) {
# ... rest of the code ...
|
d95827a4a031ac54b31b9ff0997a8248456e9d50
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
def included_package(p):
return p.startswith('spacq.') or p == 'spacq'
setup(
name='SpanishAcquisition',
version='2.0.0a1',
author='Dmitri Iouchtchenko',
author_email='[email protected]',
maintainer='Grant Watson',
maintainer_email='[email protected]',
description='Package for interfacing with devices and building user '
'interfaces.',
license='BSD',
url='http://0.github.com/SpanishAcquisition/',
packages=[p for p in find_packages() if included_package(p)],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
)
|
from setuptools import setup, find_packages
def included_package(p):
return p.startswith('spacq.') or p == 'spacq'
setup(
name='SpanishAcquisition',
version='2.0.0a1',
author='Dmitri Iouchtchenko',
author_email='[email protected]',
maintainer='Grant Watson',
maintainer_email='[email protected]',
description='Package for interfacing with devices and building user '
'interfaces.',
license='BSD',
url='http://ghwatson.github.com/SpanishAcquisitionIQC/',
packages=[p for p in find_packages() if included_package(p)],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
)
|
Change URL to new forked repo
|
Change URL to new forked repo
|
Python
|
bsd-2-clause
|
ghwatson/SpanishAcquisitionIQC,ghwatson/SpanishAcquisitionIQC
|
python
|
## Code Before:
from setuptools import setup, find_packages
def included_package(p):
return p.startswith('spacq.') or p == 'spacq'
setup(
name='SpanishAcquisition',
version='2.0.0a1',
author='Dmitri Iouchtchenko',
author_email='[email protected]',
maintainer='Grant Watson',
maintainer_email='[email protected]',
description='Package for interfacing with devices and building user '
'interfaces.',
license='BSD',
url='http://0.github.com/SpanishAcquisition/',
packages=[p for p in find_packages() if included_package(p)],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
)
## Instruction:
Change URL to new forked repo
## Code After:
from setuptools import setup, find_packages
def included_package(p):
return p.startswith('spacq.') or p == 'spacq'
setup(
name='SpanishAcquisition',
version='2.0.0a1',
author='Dmitri Iouchtchenko',
author_email='[email protected]',
maintainer='Grant Watson',
maintainer_email='[email protected]',
description='Package for interfacing with devices and building user '
'interfaces.',
license='BSD',
url='http://ghwatson.github.com/SpanishAcquisitionIQC/',
packages=[p for p in find_packages() if included_package(p)],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
)
|
...
description='Package for interfacing with devices and building user '
'interfaces.',
license='BSD',
url='http://ghwatson.github.com/SpanishAcquisitionIQC/',
packages=[p for p in find_packages() if included_package(p)],
classifiers=[
'Development Status :: 5 - Production/Stable',
...
|
c383e06d51d4e59d400ab6fd62eff2359ab4e728
|
python/the_birthday_bar.py
|
python/the_birthday_bar.py
|
import itertools
import collections
def sliding_window(n, seq):
"""
Copied from toolz
https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window
A sequence of overlapping subsequences
>>> list(sliding_window(2, [1, 2, 3, 4]))
[(1, 2), (2, 3), (3, 4)]
This function creates a sliding window suitable for transformations like
sliding means / smoothing
>>> mean = lambda seq: float(sum(seq)) / len(seq)
>>> list(map(mean, sliding_window(2, [1, 2, 3, 4])))
[1.5, 2.5, 3.5]
"""
return zip(*(collections.deque(itertools.islice(it, i), 0) or it
for i, it in enumerate(itertools.tee(seq, n))))
def birthday_chocolate(squares, day, month):
birthday_chocolates = 0
for piece in sliding_window(month, squares):
if sum(piece) == day:
birthday_chocolates += 1
return birthday_chocolates
_ = int(input().strip())
SQUARES = list(map(int, input().strip().split(' ')))
DAY, MONTH = map(int, input().strip().split(' '))
print(birthday_chocolate(SQUARES, DAY, MONTH))
|
import itertools
import collections
def sliding_window(n, seq):
"""
Copied from toolz
https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window
A sequence of overlapping subsequences
>>> list(sliding_window(2, [1, 2, 3, 4]))
[(1, 2), (2, 3), (3, 4)]
This function creates a sliding window suitable for transformations like
sliding means / smoothing
>>> mean = lambda seq: float(sum(seq)) / len(seq)
>>> list(map(mean, sliding_window(2, [1, 2, 3, 4])))
[1.5, 2.5, 3.5]
"""
return zip(*(collections.deque(itertools.islice(it, i), 0) or it
for i, it in enumerate(itertools.tee(seq, n))))
def birthday_chocolate(squares, day, month):
consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares))
birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum,
consecutive_sums))
return len(birthday_bars)
_ = int(input().strip())
SQUARES = list(map(int, input().strip().split(' ')))
DAY, MONTH = map(int, input().strip().split(' '))
print(birthday_chocolate(SQUARES, DAY, MONTH))
|
Refactor to use map and filter
|
Refactor to use map and filter
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
python
|
## Code Before:
import itertools
import collections
def sliding_window(n, seq):
"""
Copied from toolz
https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window
A sequence of overlapping subsequences
>>> list(sliding_window(2, [1, 2, 3, 4]))
[(1, 2), (2, 3), (3, 4)]
This function creates a sliding window suitable for transformations like
sliding means / smoothing
>>> mean = lambda seq: float(sum(seq)) / len(seq)
>>> list(map(mean, sliding_window(2, [1, 2, 3, 4])))
[1.5, 2.5, 3.5]
"""
return zip(*(collections.deque(itertools.islice(it, i), 0) or it
for i, it in enumerate(itertools.tee(seq, n))))
def birthday_chocolate(squares, day, month):
birthday_chocolates = 0
for piece in sliding_window(month, squares):
if sum(piece) == day:
birthday_chocolates += 1
return birthday_chocolates
_ = int(input().strip())
SQUARES = list(map(int, input().strip().split(' ')))
DAY, MONTH = map(int, input().strip().split(' '))
print(birthday_chocolate(SQUARES, DAY, MONTH))
## Instruction:
Refactor to use map and filter
## Code After:
import itertools
import collections
def sliding_window(n, seq):
"""
Copied from toolz
https://toolz.readthedocs.io/en/latest/_modules/toolz/itertoolz.html#sliding_window
A sequence of overlapping subsequences
>>> list(sliding_window(2, [1, 2, 3, 4]))
[(1, 2), (2, 3), (3, 4)]
This function creates a sliding window suitable for transformations like
sliding means / smoothing
>>> mean = lambda seq: float(sum(seq)) / len(seq)
>>> list(map(mean, sliding_window(2, [1, 2, 3, 4])))
[1.5, 2.5, 3.5]
"""
return zip(*(collections.deque(itertools.islice(it, i), 0) or it
for i, it in enumerate(itertools.tee(seq, n))))
def birthday_chocolate(squares, day, month):
consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares))
birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum,
consecutive_sums))
return len(birthday_bars)
_ = int(input().strip())
SQUARES = list(map(int, input().strip().split(' ')))
DAY, MONTH = map(int, input().strip().split(' '))
print(birthday_chocolate(SQUARES, DAY, MONTH))
|
# ... existing code ...
for i, it in enumerate(itertools.tee(seq, n))))
def birthday_chocolate(squares, day, month):
consecutive_sums = map(lambda piece: sum(piece), sliding_window(month, squares))
birthday_bars = list(filter(lambda consecutive_sum: day == consecutive_sum,
consecutive_sums))
return len(birthday_bars)
_ = int(input().strip())
SQUARES = list(map(int, input().strip().split(' ')))
# ... rest of the code ...
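As a design note on the refactor above, the same count can also be written as a single generator expression over the sliding windows, avoiding the intermediate list that filter builds. A small self-contained sketch (sample input and expected output are illustrative):

import collections
import itertools

def sliding_window(n, seq):
    # Same helper as in the record (adapted from toolz).
    return zip(*(collections.deque(itertools.islice(it, i), 0) or it
                 for i, it in enumerate(itertools.tee(seq, n))))

def birthday_chocolate(squares, day, month):
    # Count windows of length `month` whose squares sum to `day`.
    return sum(1 for piece in sliding_window(month, squares) if sum(piece) == day)

print(birthday_chocolate([1, 2, 1, 3, 2], 3, 2))  # 2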
|
04e64fea6e11a188a53d0b8d69ef97686868be1c
|
tests/py_ext_tests/test_png.py
|
tests/py_ext_tests/test_png.py
|
import unittest
import faint
import os
import py_ext_tests
class TestPng(unittest.TestCase):
def test_write_png(self):
out_dir = py_ext_tests.make_test_dir(self)
b1 = faint.Bitmap((5,7))
b1.set_pixel((0,0),(255,0,255))
fn = os.path.join(out_dir, "b1.png")
faint.write_png(b1, fn, 0)
b2, tEXt = faint.read_png(fn)
self.assertEqual(b2.get_size(), (5,7))
self.assertEqual(tEXt, {})
def test_bad_args(self):
with self.assertRaises(TypeError):
faint.write_png("not a bitmap", py_ext_tests.make_test_dir(self), 0)
|
import unittest
import faint
from faint import png
import os
import py_ext_tests
class TestPng(unittest.TestCase):
def test_write_png(self):
out_dir = py_ext_tests.make_test_dir(self)
b1 = faint.Bitmap((5,7))
b1.set_pixel((0,0),(255,0,255))
fn = os.path.join(out_dir, "b1.png")
faint.write_png(b1, fn, png.RGB)
b2, tEXt = faint.read_png(fn)
self.assertEqual(b2.get_size(), (5,7))
self.assertEqual(tEXt, {})
def test_bad_args(self):
with self.assertRaises(TypeError):
faint.write_png("not a bitmap", py_ext_tests.make_test_dir(self), 0)
|
Use the png module in test.
|
Use the png module in test.
|
Python
|
apache-2.0
|
lukas-ke/faint-graphics-editor,lukas-ke/faint-graphics-editor,lukas-ke/faint-graphics-editor,lukas-ke/faint-graphics-editor
|
python
|
## Code Before:
import unittest
import faint
import os
import py_ext_tests
class TestPng(unittest.TestCase):
def test_write_png(self):
out_dir = py_ext_tests.make_test_dir(self)
b1 = faint.Bitmap((5,7))
b1.set_pixel((0,0),(255,0,255))
fn = os.path.join(out_dir, "b1.png")
faint.write_png(b1, fn, 0)
b2, tEXt = faint.read_png(fn)
self.assertEqual(b2.get_size(), (5,7))
self.assertEqual(tEXt, {})
def test_bad_args(self):
with self.assertRaises(TypeError):
faint.write_png("not a bitmap", py_ext_tests.make_test_dir(self), 0)
## Instruction:
Use the png module in test.
## Code After:
import unittest
import faint
from faint import png
import os
import py_ext_tests
class TestPng(unittest.TestCase):
def test_write_png(self):
out_dir = py_ext_tests.make_test_dir(self)
b1 = faint.Bitmap((5,7))
b1.set_pixel((0,0),(255,0,255))
fn = os.path.join(out_dir, "b1.png")
faint.write_png(b1, fn, png.RGB)
b2, tEXt = faint.read_png(fn)
self.assertEqual(b2.get_size(), (5,7))
self.assertEqual(tEXt, {})
def test_bad_args(self):
with self.assertRaises(TypeError):
faint.write_png("not a bitmap", py_ext_tests.make_test_dir(self), 0)
|
# ... existing code ...
import unittest
import faint
from faint import png
import os
import py_ext_tests
# ... modified code ...
b1.set_pixel((0,0),(255,0,255))
fn = os.path.join(out_dir, "b1.png")
faint.write_png(b1, fn, png.RGB)
b2, tEXt = faint.read_png(fn)
self.assertEqual(b2.get_size(), (5,7))
# ... rest of the code ...
|
919931fdc7a5e6b512c5b9731f779572e540108b
|
src/main/java/com/bigcommerce/catalog/models/CustomFieldResponse.java
|
src/main/java/com/bigcommerce/catalog/models/CustomFieldResponse.java
|
package com.bigcommerce.catalog.models;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.LinkedList;
import java.util.List;
@XmlRootElement
public class CustomFieldResponse {
private CustomField data;
public CustomField getData() {
return data;
}
public void setData(CustomField data) {
this.data = data;
}
}
|
package com.bigcommerce.catalog.models;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.LinkedList;
import java.util.List;
@XmlRootElement
public class CustomFieldResponse {
private CustomField data;
private Meta meta = new Meta();
public CustomField getData() {
return data;
}
public void setData(CustomField data) {
this.data = data;
}
public Meta getMeta() {
return meta;
}
public void setMeta(final Meta meta) {
this.meta = meta;
}
}
|
Add meta field to custom field response
|
Add meta field to custom field response
|
Java
|
apache-2.0
|
rjdavis3/bigcommerce-sdk,rjdavis3/bigcommerce-sdk
|
java
|
## Code Before:
package com.bigcommerce.catalog.models;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.LinkedList;
import java.util.List;
@XmlRootElement
public class CustomFieldResponse {
private CustomField data;
public CustomField getData() {
return data;
}
public void setData(CustomField data) {
this.data = data;
}
}
## Instruction:
Add meta field to custom field response
## Code After:
package com.bigcommerce.catalog.models;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.LinkedList;
import java.util.List;
@XmlRootElement
public class CustomFieldResponse {
private CustomField data;
private Meta meta = new Meta();
public CustomField getData() {
return data;
}
public void setData(CustomField data) {
this.data = data;
}
public Meta getMeta() {
return meta;
}
public void setMeta(final Meta meta) {
this.meta = meta;
}
}
|
// ... existing code ...
@XmlRootElement
public class CustomFieldResponse {
private CustomField data;
private Meta meta = new Meta();
public CustomField getData() {
return data;
// ... modified code ...
public void setData(CustomField data) {
this.data = data;
}
public Meta getMeta() {
return meta;
}
public void setMeta(final Meta meta) {
this.meta = meta;
}
}
// ... rest of the code ...
|
c028490252290a2d4ca89eb646a8347d5e6077de
|
src/org/yeastrc/xlink/base/config_system_table_common_access/ConfigSystemsKeysSharedConstants.java
|
src/org/yeastrc/xlink/base/config_system_table_common_access/ConfigSystemsKeysSharedConstants.java
|
package org.yeastrc.xlink.base.config_system_table_common_access;
import java.util.HashSet;
import java.util.Set;
public class ConfigSystemsKeysSharedConstants {
////// Any plain text inputs need to be added to textConfigKeys in the "static {}" at the bottom
//// Proxl XML File Import Set Up keys
public static final String PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY = "proxl_xml_file_import_temp_dir";
// Lists of config keys for validation on save
public static final Set<String> textConfigKeys = new HashSet<>();
static {
textConfigKeys.add( PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY );
}
}
|
package org.yeastrc.xlink.base.config_system_table_common_access;
import java.util.HashSet;
import java.util.Set;
public class ConfigSystemsKeysSharedConstants {
////// Any plain text inputs need to be added to textConfigKeys in the "static {}" at the bottom
//// Proxl XML File Import Set Up keys
public static final String PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY = "proxl_xml_file_import_temp_dir";
public static final String SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY = "scan_file_import_allowed_via_web_submit";
// Lists of config keys for validation on save
public static final Set<String> textConfigKeys = new HashSet<>();
static {
textConfigKeys.add( PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY );
textConfigKeys.add( SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY );
}
}
|
Add Constant for config: SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY
|
Add Constant for config: SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY
|
Java
|
apache-2.0
|
yeastrc/proxl-web-app,yeastrc/proxl-web-app,yeastrc/proxl-web-app
|
java
|
## Code Before:
package org.yeastrc.xlink.base.config_system_table_common_access;
import java.util.HashSet;
import java.util.Set;
public class ConfigSystemsKeysSharedConstants {
////// Any plain text inputs need to be added to textConfigKeys in the "static {}" at the bottom
//// Proxl XML File Import Set Up keys
public static final String PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY = "proxl_xml_file_import_temp_dir";
// Lists of config keys for validation on save
public static final Set<String> textConfigKeys = new HashSet<>();
static {
textConfigKeys.add( PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY );
}
}
## Instruction:
Add Constant for config: SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY
## Code After:
package org.yeastrc.xlink.base.config_system_table_common_access;
import java.util.HashSet;
import java.util.Set;
public class ConfigSystemsKeysSharedConstants {
////// Any plain text inputs need to be added to textConfigKeys in the "static {}" at the bottom
//// Proxl XML File Import Set Up keys
public static final String PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY = "proxl_xml_file_import_temp_dir";
public static final String SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY = "scan_file_import_allowed_via_web_submit";
// Lists of config keys for validation on save
public static final Set<String> textConfigKeys = new HashSet<>();
static {
textConfigKeys.add( PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY );
textConfigKeys.add( SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY );
}
}
|
// ... existing code ...
public static final String PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY = "proxl_xml_file_import_temp_dir";
public static final String SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY = "scan_file_import_allowed_via_web_submit";
// Lists of config keys for validation on save
// ... modified code ...
static {
textConfigKeys.add( PROXL_XML_FILE_IMPORT_TEMP_DIR_KEY );
textConfigKeys.add( SCAN_FILE_IMPORT_ALLOWED_VIA_WEB_SUBMIT_KEY );
}
}
// ... rest of the code ...
|
6e2bee2717a640046dd323972d9c7238d8184797
|
include/ccspec/core/example_group.h
|
include/ccspec/core/example_group.h
|
namespace ccspec {
namespace core {
class ExampleGroup;
typedef ExampleGroup* Creator(std::string desc, std::function<void ()> spec);
class ExampleGroup {
public:
virtual ~ExampleGroup();
void addChild(ExampleGroup*);
protected:
ExampleGroup(std::string desc);
private:
std::string desc_;
std::list<ExampleGroup*> children_;
friend Creator describe;
friend Creator context;
};
extern std::stack<ExampleGroup*> groups_being_defined;
Creator describe;
Creator context;
} // namespace core
} // namespace ccspec
#endif // CCSPEC_CORE_EXAMPLE_GROUP_H_
|
namespace ccspec {
namespace core {
class ExampleGroup;
typedef ExampleGroup* Creator(std::string desc, std::function<void ()> spec);
extern std::stack<ExampleGroup*> groups_being_defined;
class ExampleGroup {
public:
virtual ~ExampleGroup();
void addChild(ExampleGroup*);
protected:
ExampleGroup(std::string desc);
private:
std::string desc_;
std::list<ExampleGroup*> children_;
friend Creator describe;
friend Creator context;
};
Creator describe;
Creator context;
} // namespace core
} // namespace ccspec
#endif // CCSPEC_CORE_EXAMPLE_GROUP_H_
|
Move global variable declaration for consistency
|
Move global variable declaration for consistency
|
C
|
mit
|
zhangsu/ccspec,tempbottle/ccspec,michaelachrisco/ccspec,zhangsu/ccspec,michaelachrisco/ccspec,tempbottle/ccspec,michaelachrisco/ccspec,tempbottle/ccspec,zhangsu/ccspec
|
c
|
## Code Before:
namespace ccspec {
namespace core {
class ExampleGroup;
typedef ExampleGroup* Creator(std::string desc, std::function<void ()> spec);
class ExampleGroup {
public:
virtual ~ExampleGroup();
void addChild(ExampleGroup*);
protected:
ExampleGroup(std::string desc);
private:
std::string desc_;
std::list<ExampleGroup*> children_;
friend Creator describe;
friend Creator context;
};
extern std::stack<ExampleGroup*> groups_being_defined;
Creator describe;
Creator context;
} // namespace core
} // namespace ccspec
#endif // CCSPEC_CORE_EXAMPLE_GROUP_H_
## Instruction:
Move global variable declaration for consistency
## Code After:
namespace ccspec {
namespace core {
class ExampleGroup;
typedef ExampleGroup* Creator(std::string desc, std::function<void ()> spec);
extern std::stack<ExampleGroup*> groups_being_defined;
class ExampleGroup {
public:
virtual ~ExampleGroup();
void addChild(ExampleGroup*);
protected:
ExampleGroup(std::string desc);
private:
std::string desc_;
std::list<ExampleGroup*> children_;
friend Creator describe;
friend Creator context;
};
Creator describe;
Creator context;
} // namespace core
} // namespace ccspec
#endif // CCSPEC_CORE_EXAMPLE_GROUP_H_
|
# ... existing code ...
class ExampleGroup;
typedef ExampleGroup* Creator(std::string desc, std::function<void ()> spec);
extern std::stack<ExampleGroup*> groups_being_defined;
class ExampleGroup {
public:
# ... modified code ...
friend Creator context;
};
Creator describe;
Creator context;
# ... rest of the code ...
|
76b40a801b69023f5983dcfa4ecd5e904792f131
|
paypal/standard/pdt/forms.py
|
paypal/standard/pdt/forms.py
|
from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
fields = '__all__'
|
from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
|
Add non-PayPal fields to exclude
|
Add non-PayPal fields to exclude
All the non-PayPal fields are blanked if you don't exclude them from the form.
|
Python
|
mit
|
spookylukey/django-paypal,rsalmaso/django-paypal,spookylukey/django-paypal,rsalmaso/django-paypal,rsalmaso/django-paypal,GamesDoneQuick/django-paypal,spookylukey/django-paypal,GamesDoneQuick/django-paypal
|
python
|
## Code Before:
from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
fields = '__all__'
## Instruction:
Add non-PayPal fields to exclude
All the non-PayPal fields are blanked if you don't exclude them from the form.
## Code After:
from __future__ import unicode_literals
import django
from paypal.standard.forms import PayPalStandardBaseForm
from paypal.standard.pdt.models import PayPalPDT
class PayPalPDTForm(PayPalStandardBaseForm):
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
|
# ... existing code ...
class Meta:
model = PayPalPDT
if django.VERSION >= (1, 6):
exclude = ('ipaddress', 'flag', 'flag_code', 'flag_info', 'query', 'response', 'created_at', 'updated', 'form_view',)
# ... rest of the code ...
|
fb65fedbf60481d37e097ea9db290f53b84cae26
|
giveaminute/migrations/versions/001_Initial_models.py
|
giveaminute/migrations/versions/001_Initial_models.py
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
Comment out the initial migration step by default (so that we're not inadvertently blowing people's databases away)
|
Comment out the initial migration step by default (so that we're not inadvertently blowing people's databases away)
|
Python
|
agpl-3.0
|
codeforamerica/Change-By-Us,localprojects/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,localprojects/Change-By-Us,codeforeurope/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,localprojects/Change-By-Us,localprojects/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,codeforamerica/Change-By-Us,codeforeurope/Change-By-Us
|
python
|
## Code Before:
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
## Instruction:
Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases away
## Code After:
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
...
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
...
|
b1e2a8a67916dfe448349f476c3e5bdd59ae797c
|
TestUtils/src/main/java/com/braintreepayments/testutils/BraintreeActivityTestRule.java
|
TestUtils/src/main/java/com/braintreepayments/testutils/BraintreeActivityTestRule.java
|
package com.braintreepayments.testutils;
import android.app.Activity;
import android.app.KeyguardManager;
import android.content.Context;
import android.support.test.espresso.intent.Intents;
import android.support.test.rule.ActivityTestRule;
import static android.support.test.InstrumentationRegistry.getTargetContext;
import static com.braintreepayments.testutils.SharedPreferencesHelper.getSharedPreferences;
public class BraintreeActivityTestRule<T extends Activity> extends ActivityTestRule<T> {
public BraintreeActivityTestRule(Class<T> activityClass) {
super(activityClass);
}
public BraintreeActivityTestRule(Class<T> activityClass, boolean initialTouchMode,
boolean launchActivity) {
super(activityClass, initialTouchMode, launchActivity);
}
@Override
protected void beforeActivityLaunched() {
super.beforeActivityLaunched();
getSharedPreferences().edit().clear().commit();
((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
.newKeyguardLock("BraintreeActivityTestRule")
.disableKeyguard();
}
@Override
protected void afterActivityLaunched() {
Intents.init();
super.afterActivityLaunched();
}
@Override
protected void afterActivityFinished() {
super.afterActivityFinished();
Intents.release();
getSharedPreferences().edit().clear().commit();
}
}
|
package com.braintreepayments.testutils;
import android.app.Activity;
import android.app.KeyguardManager;
import android.app.KeyguardManager.KeyguardLock;
import android.content.Context;
import android.support.test.espresso.intent.Intents;
import android.support.test.rule.ActivityTestRule;
import static android.support.test.InstrumentationRegistry.getTargetContext;
import static com.braintreepayments.testutils.SharedPreferencesHelper.getSharedPreferences;
public class BraintreeActivityTestRule<T extends Activity> extends ActivityTestRule<T> {
private KeyguardLock mKeyguardLock;
public BraintreeActivityTestRule(Class<T> activityClass) {
super(activityClass);
init();
}
public BraintreeActivityTestRule(Class<T> activityClass, boolean initialTouchMode,
boolean launchActivity) {
super(activityClass, initialTouchMode, launchActivity);
init();
}
private void init() {
getSharedPreferences().edit().clear().commit();
mKeyguardLock = ((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
.newKeyguardLock("BraintreeActivityTestRule");
mKeyguardLock.disableKeyguard();
}
@Override
protected void afterActivityLaunched() {
Intents.init();
super.afterActivityLaunched();
}
@Override
protected void afterActivityFinished() {
super.afterActivityFinished();
try {
Intents.release();
} catch (IllegalStateException ignored) {}
getSharedPreferences().edit().clear().commit();
mKeyguardLock.reenableKeyguard();
}
}
|
Fix incorrect test failures and Keyguard leaks
|
Fix incorrect test failures and Keyguard leaks
|
Java
|
mit
|
braintree/braintree_android,braintree/braintree_android,braintree/braintree_android,braintree/braintree_android
|
java
|
## Code Before:
package com.braintreepayments.testutils;
import android.app.Activity;
import android.app.KeyguardManager;
import android.content.Context;
import android.support.test.espresso.intent.Intents;
import android.support.test.rule.ActivityTestRule;
import static android.support.test.InstrumentationRegistry.getTargetContext;
import static com.braintreepayments.testutils.SharedPreferencesHelper.getSharedPreferences;
public class BraintreeActivityTestRule<T extends Activity> extends ActivityTestRule<T> {
public BraintreeActivityTestRule(Class<T> activityClass) {
super(activityClass);
}
public BraintreeActivityTestRule(Class<T> activityClass, boolean initialTouchMode,
boolean launchActivity) {
super(activityClass, initialTouchMode, launchActivity);
}
@Override
protected void beforeActivityLaunched() {
super.beforeActivityLaunched();
getSharedPreferences().edit().clear().commit();
((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
.newKeyguardLock("BraintreeActivityTestRule")
.disableKeyguard();
}
@Override
protected void afterActivityLaunched() {
Intents.init();
super.afterActivityLaunched();
}
@Override
protected void afterActivityFinished() {
super.afterActivityFinished();
Intents.release();
getSharedPreferences().edit().clear().commit();
}
}
## Instruction:
Fix incorrect test failures and Keyguard leaks
## Code After:
package com.braintreepayments.testutils;
import android.app.Activity;
import android.app.KeyguardManager;
import android.app.KeyguardManager.KeyguardLock;
import android.content.Context;
import android.support.test.espresso.intent.Intents;
import android.support.test.rule.ActivityTestRule;
import static android.support.test.InstrumentationRegistry.getTargetContext;
import static com.braintreepayments.testutils.SharedPreferencesHelper.getSharedPreferences;
public class BraintreeActivityTestRule<T extends Activity> extends ActivityTestRule<T> {
private KeyguardLock mKeyguardLock;
public BraintreeActivityTestRule(Class<T> activityClass) {
super(activityClass);
init();
}
public BraintreeActivityTestRule(Class<T> activityClass, boolean initialTouchMode,
boolean launchActivity) {
super(activityClass, initialTouchMode, launchActivity);
init();
}
private void init() {
getSharedPreferences().edit().clear().commit();
mKeyguardLock = ((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
.newKeyguardLock("BraintreeActivityTestRule");
mKeyguardLock.disableKeyguard();
}
@Override
protected void afterActivityLaunched() {
Intents.init();
super.afterActivityLaunched();
}
@Override
protected void afterActivityFinished() {
super.afterActivityFinished();
try {
Intents.release();
} catch (IllegalStateException ignored) {}
getSharedPreferences().edit().clear().commit();
mKeyguardLock.reenableKeyguard();
}
}
|
// ... existing code ...
import android.app.Activity;
import android.app.KeyguardManager;
import android.app.KeyguardManager.KeyguardLock;
import android.content.Context;
import android.support.test.espresso.intent.Intents;
import android.support.test.rule.ActivityTestRule;
// ... modified code ...
public class BraintreeActivityTestRule<T extends Activity> extends ActivityTestRule<T> {
private KeyguardLock mKeyguardLock;
public BraintreeActivityTestRule(Class<T> activityClass) {
super(activityClass);
init();
}
public BraintreeActivityTestRule(Class<T> activityClass, boolean initialTouchMode,
boolean launchActivity) {
super(activityClass, initialTouchMode, launchActivity);
init();
}
private void init() {
getSharedPreferences().edit().clear().commit();
mKeyguardLock = ((KeyguardManager) getTargetContext().getSystemService(Context.KEYGUARD_SERVICE))
.newKeyguardLock("BraintreeActivityTestRule");
mKeyguardLock.disableKeyguard();
}
@Override
...
protected void afterActivityFinished() {
super.afterActivityFinished();
try {
Intents.release();
} catch (IllegalStateException ignored) {}
getSharedPreferences().edit().clear().commit();
mKeyguardLock.reenableKeyguard();
}
}
// ... rest of the code ...
|
3bddeade05ca5ddc799733baa1545aa2b8b68060
|
hoomd/tune/custom_tuner.py
|
hoomd/tune/custom_tuner.py
|
from hoomd import _hoomd
from hoomd.custom import (
_CustomOperation, _InternalCustomOperation, Action)
from hoomd.operation import _Tuner
class _TunerProperty:
@property
def updater(self):
return self._action
@updater.setter
def updater(self, updater):
if isinstance(updater, Action):
self._action = updater
else:
raise ValueError(
"updater must be an instance of hoomd.custom.Action")
class CustomTuner(_CustomOperation, _TunerProperty, _Tuner):
"""Tuner wrapper for `hoomd.custom.Action` objects.
For usage see `hoomd.custom._CustomOperation`.
"""
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
super().attach(simulation)
self._action.attach(simulation)
class _InternalCustomTuner(
_InternalCustomOperation, _TunerProperty, _Tuner):
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
|
from hoomd import _hoomd
from hoomd.operation import _Operation
from hoomd.custom import (
_CustomOperation, _InternalCustomOperation, Action)
from hoomd.operation import _Tuner
class _TunerProperty:
@property
def tuner(self):
return self._action
@tuner.setter
def tuner(self, tuner):
if isinstance(tuner, Action):
self._action = tuner
else:
raise ValueError(
"updater must be an instance of hoomd.custom.Action")
class CustomTuner(_CustomOperation, _TunerProperty, _Tuner):
"""Tuner wrapper for `hoomd.custom.Action` objects.
For usage see `hoomd.custom._CustomOperation`.
"""
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
self._action.attach(simulation)
_Operation.attach(self, simulation)
class _InternalCustomTuner(
_InternalCustomOperation, _TunerProperty, _Tuner):
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
self._action.attach(simulation)
_Operation.attach(self, simulation)
|
Fix attaching on custom tuners
|
Fix attaching on custom tuners
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
python
|
## Code Before:
from hoomd import _hoomd
from hoomd.custom import (
_CustomOperation, _InternalCustomOperation, Action)
from hoomd.operation import _Tuner
class _TunerProperty:
@property
def updater(self):
return self._action
@updater.setter
def updater(self, updater):
if isinstance(updater, Action):
self._action = updater
else:
raise ValueError(
"updater must be an instance of hoomd.custom.Action")
class CustomTuner(_CustomOperation, _TunerProperty, _Tuner):
"""Tuner wrapper for `hoomd.custom.Action` objects.
For usage see `hoomd.custom._CustomOperation`.
"""
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
super().attach(simulation)
self._action.attach(simulation)
class _InternalCustomTuner(
_InternalCustomOperation, _TunerProperty, _Tuner):
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
## Instruction:
Fix attaching on custom tuners
## Code After:
from hoomd import _hoomd
from hoomd.operation import _Operation
from hoomd.custom import (
_CustomOperation, _InternalCustomOperation, Action)
from hoomd.operation import _Tuner
class _TunerProperty:
@property
def tuner(self):
return self._action
@tuner.setter
def tuner(self, tuner):
if isinstance(tuner, Action):
self._action = tuner
else:
raise ValueError(
"updater must be an instance of hoomd.custom.Action")
class CustomTuner(_CustomOperation, _TunerProperty, _Tuner):
"""Tuner wrapper for `hoomd.custom.Action` objects.
For usage see `hoomd.custom._CustomOperation`.
"""
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
self._action.attach(simulation)
_Operation.attach(self, simulation)
class _InternalCustomTuner(
_InternalCustomOperation, _TunerProperty, _Tuner):
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
self._action.attach(simulation)
_Operation.attach(self, simulation)
|
...
from hoomd import _hoomd
from hoomd.operation import _Operation
from hoomd.custom import (
_CustomOperation, _InternalCustomOperation, Action)
from hoomd.operation import _Tuner
...
class _TunerProperty:
@property
def tuner(self):
return self._action
@tuner.setter
def tuner(self, tuner):
if isinstance(tuner, Action):
self._action = tuner
else:
raise ValueError(
"updater must be an instance of hoomd.custom.Action")
...
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
self._action.attach(simulation)
_Operation.attach(self, simulation)
class _InternalCustomTuner(
...
_InternalCustomOperation, _TunerProperty, _Tuner):
_cpp_list_name = 'tuners'
_cpp_class_name = 'PythonTuner'
def attach(self, simulation):
self._cpp_obj = getattr(_hoomd, self._cpp_class_name)(
simulation.state._cpp_sys_def, self.trigger, self._action)
self._action.attach(simulation)
_Operation.attach(self, simulation)
...
|
9532a28dacefec67ea67f94cf992a505d8a6629d
|
utilities/ticker-update.py
|
utilities/ticker-update.py
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
|
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
|
Fix file read, start on sell price
|
Fix file read, start on sell price
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
python
|
## Code Before:
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
secutities = []
with open("ticker-updates,cong", r) as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
for security in securities:
query = URL + security
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = span.get_text()
table_row = soup.select('table td')
open = table_row[3].text
print(f"{security:>6}: {open:<6} {price:<6}")
## Instruction:
Fix file read, start on sell price
## Code After:
import requests
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
|
// ... existing code ...
from bs4 import BeautifulSoup
URL = 'https://finance.yahoo.com/quote/'
CONF_FILE = "ticker-updates.conf"
secutities = []
with open(CONF_FILE, "r") as conf_file:
securities = conf_file.readlines()
securities = [s.strip() for s in securities]
print(securities)
for security in securities:
print(security)
symbol, sell_price = security.split(',')
print(f"sy: {symbol} p: {sell_price}")
query = URL + symbol
page = requests.get(query)
soup = BeautifulSoup(page.content, 'html.parser')
span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"})
price = float(span.get_text())
table_row = soup.select('table td')
open = float(table_row[3].text)
print(f"{symbol:>6}: {sell_price:<6} {open:<6} {price:<6} {open - price:<6}")
// ... rest of the code ...
|
f468ea8123768a3f66621bfecae20814fa83017b
|
website_sale_clear_line/controllers/main.py
|
website_sale_clear_line/controllers/main.py
|
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
|
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
|
FIX website sale clear line
|
FIX website sale clear line
|
Python
|
agpl-3.0
|
ingadhoc/website
|
python
|
## Code Before:
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
## Instruction:
FIX website sale clear line
## Code After:
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
|
// ... existing code ...
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
// ... modified code ...
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
// ... rest of the code ...
|
6b84688c1b5a7f2e8c9e5007455b88cbaa845e9f
|
tests/test_track_output/results.py
|
tests/test_track_output/results.py
|
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../track.py', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
|
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../scripts/openmc-track-to-vtk', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
|
Fix path to script in test_track_output
|
Fix path to script in test_track_output
|
Python
|
mit
|
mjlong/openmc,wbinventor/openmc,bhermanmit/openmc,wbinventor/openmc,johnnyliu27/openmc,paulromano/openmc,mjlong/openmc,smharper/openmc,lilulu/openmc,johnnyliu27/openmc,amandalund/openmc,samuelshaner/openmc,mit-crpg/openmc,shikhar413/openmc,johnnyliu27/openmc,mit-crpg/openmc,liangjg/openmc,kellyrowland/openmc,amandalund/openmc,walshjon/openmc,lilulu/openmc,liangjg/openmc,liangjg/openmc,johnnyliu27/openmc,samuelshaner/openmc,mit-crpg/openmc,amandalund/openmc,smharper/openmc,shikhar413/openmc,samuelshaner/openmc,mit-crpg/openmc,shikhar413/openmc,paulromano/openmc,liangjg/openmc,smharper/openmc,shikhar413/openmc,samuelshaner/openmc,paulromano/openmc,walshjon/openmc,walshjon/openmc,smharper/openmc,bhermanmit/openmc,wbinventor/openmc,wbinventor/openmc,walshjon/openmc,paulromano/openmc,lilulu/openmc,amandalund/openmc,kellyrowland/openmc
|
python
|
## Code Before:
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../track.py', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
## Instruction:
Fix path to script in test_track_output
## Code After:
import os
import sys
import glob
import shutil
from subprocess import call
# If vtk python module is not available, we can't run track.py so skip this
# test
cwd = os.getcwd()
try:
import vtk
except ImportError:
print('----------------Skipping test-------------')
shutil.copy('results_true.dat', 'results_test.dat')
exit()
# Run track processing script
call(['../../scripts/openmc-track-to-vtk', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
shutil.copy('poly.pvtp', 'results_test.dat')
|
# ... existing code ...
exit()
# Run track processing script
call(['../../scripts/openmc-track-to-vtk', '-o', 'poly'] +
glob.glob(''.join((cwd, '/track*'))))
poly = ''.join((cwd, '/poly.pvtp'))
assert os.path.isfile(poly), 'poly.pvtp file not found.'
# ... rest of the code ...
|
cd08af338b1a1bd139c8fcde51ce8d336fd1ab80
|
app/src/main/java/de/philipphager/disclosure/feature/app/detail/DetailView.java
|
app/src/main/java/de/philipphager/disclosure/feature/app/detail/DetailView.java
|
package de.philipphager.disclosure.feature.app.detail;
import de.philipphager.disclosure.database.library.model.Library;
import de.philipphager.disclosure.feature.navigation.Navigates;
import de.philipphager.disclosure.util.ui.components.ScoreView;
import java.util.List;
public interface DetailView extends Navigates {
void notify(String message);
void setToolbarTitle(String title);
void setAppIcon(String packageName);
void setLibraries(List<Library> libraries);
void setScore(ScoreView.Score score);
}
|
package de.philipphager.disclosure.feature.app.detail;
import android.content.Intent;
import de.philipphager.disclosure.database.library.model.Library;
import de.philipphager.disclosure.feature.navigation.Navigates;
import de.philipphager.disclosure.util.ui.components.ScoreView;
import java.util.List;
public interface DetailView extends Navigates {
void notify(String message);
void setToolbarTitle(String title);
void setAppIcon(String packageName);
void setLibraries(List<Library> libraries);
void setScore(ScoreView.Score score);
void showEditPermissionsTutorial(String packageName);
void showRuntimePermissionsTutorial(String packageName);
void enableEditPermissions(boolean isEnabled);
void startActivityForResult(Intent intent, int requestCode);
void finish();
}
|
Update view interface for app detail activity
|
Update view interface for app detail activity
|
Java
|
apache-2.0
|
philipphager/disclosure-android-app
|
java
|
## Code Before:
package de.philipphager.disclosure.feature.app.detail;
import de.philipphager.disclosure.database.library.model.Library;
import de.philipphager.disclosure.feature.navigation.Navigates;
import de.philipphager.disclosure.util.ui.components.ScoreView;
import java.util.List;
public interface DetailView extends Navigates {
void notify(String message);
void setToolbarTitle(String title);
void setAppIcon(String packageName);
void setLibraries(List<Library> libraries);
void setScore(ScoreView.Score score);
}
## Instruction:
Update view interface for app detail activity
## Code After:
package de.philipphager.disclosure.feature.app.detail;
import android.content.Intent;
import de.philipphager.disclosure.database.library.model.Library;
import de.philipphager.disclosure.feature.navigation.Navigates;
import de.philipphager.disclosure.util.ui.components.ScoreView;
import java.util.List;
public interface DetailView extends Navigates {
void notify(String message);
void setToolbarTitle(String title);
void setAppIcon(String packageName);
void setLibraries(List<Library> libraries);
void setScore(ScoreView.Score score);
void showEditPermissionsTutorial(String packageName);
void showRuntimePermissionsTutorial(String packageName);
void enableEditPermissions(boolean isEnabled);
void startActivityForResult(Intent intent, int requestCode);
void finish();
}
|
...
package de.philipphager.disclosure.feature.app.detail;
import android.content.Intent;
import de.philipphager.disclosure.database.library.model.Library;
import de.philipphager.disclosure.feature.navigation.Navigates;
import de.philipphager.disclosure.util.ui.components.ScoreView;
...
void setLibraries(List<Library> libraries);
void setScore(ScoreView.Score score);
void showEditPermissionsTutorial(String packageName);
void showRuntimePermissionsTutorial(String packageName);
void enableEditPermissions(boolean isEnabled);
void startActivityForResult(Intent intent, int requestCode);
void finish();
}
...
|
4903afcec3d22d046c39a5b565366dc13472c6fd
|
zosimus/chartchemy/utils.py
|
zosimus/chartchemy/utils.py
|
import simplejson
from django.utils.html import escape
def render_highcharts_options(render_to, categories, series, title, x_axis_title, y_axis_title, series_name):
"""Accepts the parameters to render a chart and returns a JSON serialized Highcharts options object."""
# Escape all the character strings to make them HTML safe.
render_to = escape(render_to) if render_to else render_to
title = escape(title) if title else title
x_axis_title = escape(x_axis_title) if x_axis_title else x_axis_title
y_axis_title = escape(y_axis_title) if y_axis_title else y_axis_title
# Categories (dimensions) come from the use. Escape them too.
categories = [escape(c) for c in categories]
hco = {
"chart": {
"renderTo": render_to,
"type": 'column'
},
"title": {
"text": title
},
"xAxis": {
"title": {
"text": x_axis_title
},
"categories": categories
},
"yAxis": {
"title": {
"text": y_axis_title,
}
},
"series": [{
"name": series_name,
"data": series,
}]
}
return simplejson.dumps(hco, use_decimal=True)
|
import simplejson
from django.utils.html import escape
def render_highcharts_options(render_to, categories, series, title, x_axis_title, y_axis_title, series_name):
"""Accepts the parameters to render a chart and returns a JSON serialized Highcharts options object."""
# Escape all the character strings to make them HTML safe.
render_to = escape(render_to.encode('ascii', 'ignore')) if render_to else 'render_to'
title = escape(title.encode('ascii', 'ignore')) if title else 'title'
x_axis_title = escape(x_axis_title.encode('ascii', 'ignore')) if x_axis_title else 'x axis'
y_axis_title = escape(y_axis_title.encode('ascii', 'ignore')) if y_axis_title else 'y axis'
# Categories (dimensions) come from the use. Escape them too.
categories = [escape(c.encode('ascii', 'ignore')) for c in categories]
hco = {
"chart": {
"renderTo": render_to,
"type": 'column'
},
"title": {
"text": title
},
"xAxis": {
"title": {
"text": x_axis_title
},
"categories": categories
},
"yAxis": {
"title": {
"text": y_axis_title,
}
},
"series": [{
"name": series_name,
"data": series,
}]
}
return simplejson.dumps(hco, use_decimal=True)
|
Fix unicode error in series
|
Fix unicode error in series
|
Python
|
bsd-2-clause
|
pgollakota/zosimus,pgollakota/zosimus
|
python
|
## Code Before:
import simplejson
from django.utils.html import escape
def render_highcharts_options(render_to, categories, series, title, x_axis_title, y_axis_title, series_name):
"""Accepts the parameters to render a chart and returns a JSON serialized Highcharts options object."""
# Escape all the character strings to make them HTML safe.
render_to = escape(render_to) if render_to else render_to
title = escape(title) if title else title
x_axis_title = escape(x_axis_title) if x_axis_title else x_axis_title
y_axis_title = escape(y_axis_title) if y_axis_title else y_axis_title
# Categories (dimensions) come from the use. Escape them too.
categories = [escape(c) for c in categories]
hco = {
"chart": {
"renderTo": render_to,
"type": 'column'
},
"title": {
"text": title
},
"xAxis": {
"title": {
"text": x_axis_title
},
"categories": categories
},
"yAxis": {
"title": {
"text": y_axis_title,
}
},
"series": [{
"name": series_name,
"data": series,
}]
}
return simplejson.dumps(hco, use_decimal=True)
## Instruction:
Fix unicode error in series
## Code After:
import simplejson
from django.utils.html import escape
def render_highcharts_options(render_to, categories, series, title, x_axis_title, y_axis_title, series_name):
"""Accepts the parameters to render a chart and returns a JSON serialized Highcharts options object."""
# Escape all the character strings to make them HTML safe.
render_to = escape(render_to.encode('ascii', 'ignore')) if render_to else 'render_to'
title = escape(title.encode('ascii', 'ignore')) if title else 'title'
x_axis_title = escape(x_axis_title.encode('ascii', 'ignore')) if x_axis_title else 'x axis'
y_axis_title = escape(y_axis_title.encode('ascii', 'ignore')) if y_axis_title else 'y axis'
# Categories (dimensions) come from the use. Escape them too.
categories = [escape(c.encode('ascii', 'ignore')) for c in categories]
hco = {
"chart": {
"renderTo": render_to,
"type": 'column'
},
"title": {
"text": title
},
"xAxis": {
"title": {
"text": x_axis_title
},
"categories": categories
},
"yAxis": {
"title": {
"text": y_axis_title,
}
},
"series": [{
"name": series_name,
"data": series,
}]
}
return simplejson.dumps(hco, use_decimal=True)
|
// ... existing code ...
"""Accepts the parameters to render a chart and returns a JSON serialized Highcharts options object."""
# Escape all the character strings to make them HTML safe.
render_to = escape(render_to.encode('ascii', 'ignore')) if render_to else 'render_to'
title = escape(title.encode('ascii', 'ignore')) if title else 'title'
x_axis_title = escape(x_axis_title.encode('ascii', 'ignore')) if x_axis_title else 'x axis'
y_axis_title = escape(y_axis_title.encode('ascii', 'ignore')) if y_axis_title else 'y axis'
# Categories (dimensions) come from the use. Escape them too.
categories = [escape(c.encode('ascii', 'ignore')) for c in categories]
hco = {
"chart": {
// ... rest of the code ...
|
efbcd8104470234e50ad2e40719b0edf1fbc45c4
|
zou/app/utils/date_helpers.py
|
zou/app/utils/date_helpers.py
|
from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
|
from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
Add helper to handle timezone in date strings
|
[utils] Add helper to handle timezone in date strings
|
Python
|
agpl-3.0
|
cgwire/zou
|
python
|
## Code Before:
from datetime import date, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
## Instruction:
[utils] Add helper to handle timezone in date strings
## Code After:
from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
return date.today() - timedelta(days=nb_days)
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
|
...
from babel.dates import format_datetime
from datetime import date, datetime, timedelta
def get_date_from_now(nb_days):
...
def get_date_diff(date_a, date_b):
return abs((date_b - date_a).total_seconds())
def get_date_string_with_timezone(date_string, timezone):
"""
Apply given timezone to given date and return it as a string.
"""
date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
return format_datetime(
date_obj,
"YYYY-MM-DDTHH:mm:ss",
tzinfo=timezone
)
...
|
1fc71e1d3a95fb02b91c503cff1a9df7ae6531df
|
src/com/facebook/buck/cxx/AbstractClangCxxCompilationDatabaseEntry.java
|
src/com/facebook/buck/cxx/AbstractClangCxxCompilationDatabaseEntry.java
|
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.immutables.value.Value;
@BuckStyleImmutable
@Value.Immutable
abstract class AbstractClangCxxCompilationDatabaseEntry implements CxxCompilationDatabaseEntry {
@Value.Parameter
public abstract String getDirectory();
@Override
@Value.Parameter
public abstract String getFile();
@JsonIgnore
@Value.Parameter
public abstract ImmutableList<String> getArgs();
@Override
@Value.Derived
public String getCommand() {
return Joiner.on(' ').join(
Iterables.transform(
getArgs(),
Escaper.SHELL_ESCAPER));
}
}
|
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.immutables.value.Value;
@BuckStyleImmutable
@Value.Immutable
abstract class AbstractClangCxxCompilationDatabaseEntry implements CxxCompilationDatabaseEntry {
@Value.Parameter
public abstract String getDirectory();
@Override
@Value.Parameter
public abstract String getFile();
@Value.Parameter
public abstract ImmutableList<String> getArguments();
@Override
@Value.Derived
public String getCommand() {
return Joiner.on(' ').join(
Iterables.transform(
getArguments(),
Escaper.SHELL_ESCAPER));
}
}
|
Add arguments to clang entries.
|
Add arguments to clang entries.
Summary:
http://reviews.llvm.org/rL245036 added a new attribute to clang
compilation database entries, `arguments`.
Test Plan:
CI
create a compilation database with clang format, see it has `arguments`
|
Java
|
apache-2.0
|
OkBuilds/buck,daedric/buck,OkBuilds/buck,tgummerer/buck,Dominator008/buck,zhan-xiong/buck,rmaz/buck,marcinkwiatkowski/buck,brettwooldridge/buck,illicitonion/buck,tgummerer/buck,Addepar/buck,ilya-klyuchnikov/buck,shs96c/buck,dsyang/buck,illicitonion/buck,mogers/buck,liuyang-li/buck,sdwilsh/buck,k21/buck,Dominator008/buck,robbertvanginkel/buck,mikekap/buck,SeleniumHQ/buck,tgummerer/buck,janicduplessis/buck,daedric/buck,tgummerer/buck,nguyentruongtho/buck,romanoid/buck,daedric/buck,rhencke/buck,kageiit/buck,nguyentruongtho/buck,facebook/buck,shybovycha/buck,LegNeato/buck,liuyang-li/buck,vine/buck,kageiit/buck,OkBuilds/buck,liuyang-li/buck,mikekap/buck,darkforestzero/buck,Distrotech/buck,JoelMarcey/buck,raviagarwal7/buck,justinmuller/buck,sdwilsh/buck,rhencke/buck,mogers/buck,rmaz/buck,zhan-xiong/buck,marcinkwiatkowski/buck,romanoid/buck,mogers/buck,liuyang-li/buck,rhencke/buck,robbertvanginkel/buck,nguyentruongtho/buck,zhan-xiong/buck,ilya-klyuchnikov/buck,davido/buck,liuyang-li/buck,mogers/buck,marcinkwiatkowski/buck,OkBuilds/buck,vine/buck,grumpyjames/buck,bocon13/buck,rhencke/buck,ilya-klyuchnikov/buck,Addepar/buck,dsyang/buck,raviagarwal7/buck,OkBuilds/buck,brettwooldridge/buck,zpao/buck,tgummerer/buck,grumpyjames/buck,marcinkwiatkowski/buck,k21/buck,shybovycha/buck,Distrotech/buck,robbertvanginkel/buck,k21/buck,justinmuller/buck,shybovycha/buck,JoelMarcey/buck,facebook/buck,justinmuller/buck,vschs007/buck,clonetwin26/buck,k21/buck,shybovycha/buck,brettwooldridge/buck,rmaz/buck,dsyang/buck,raviagarwal7/buck,vschs007/buck,mogers/buck,rhencke/buck,shybovycha/buck,grumpyjames/buck,Dominator008/buck,clonetwin26/buck,justinmuller/buck,SeleniumHQ/buck,janicduplessis/buck,Addepar/buck,ilya-klyuchnikov/buck,daedric/buck,vine/buck,mikekap/buck,clonetwin26/buck,grumpyjames/buck,Dominator008/buck,OkBuilds/buck,Dominator008/buck,JoelMarcey/buck,rowillia/buck,raviagarwal7/buck,illicitonion/buck,darkforestzero/buck,janicduplessis/buck,vschs007/buck,JoelMarcey/buck,rowillia/buck,tgummerer/buck,SeleniumHQ/buck,brettwooldridge/buck,Addepar/buck,mikekap/buck,kageiit/buck,vschs007/buck,illicitonion/buck,darkforestzero/buck,vine/buck,LegNeato/buck,OkBuilds/buck,Distrotech/buck,ilya-klyuchnikov/buck,justinmuller/buck,clonetwin26/buck,justinmuller/buck,zhan-xiong/buck,romanoid/buck,clonetwin26/buck,Dominator008/buck,tgummerer/buck,romanoid/buck,dsyang/buck,SeleniumHQ/buck,romanoid/buck,shs96c/buck,illicitonion/buck,dsyang/buck,zhan-xiong/buck,mikekap/buck,rmaz/buck,rowillia/buck,shs96c/buck,nguyentruongtho/buck,SeleniumHQ/buck,daedric/buck,shs96c/buck,romanoid/buck,robbertvanginkel/buck,raviagarwal7/buck,k21/buck,ilya-klyuchnikov/buck,clonetwin26/buck,sdwilsh/buck,Addepar/buck,brettwooldridge/buck,marcinkwiatkowski/buck,robbertvanginkel/buck,SeleniumHQ/buck,daedric/buck,zhan-xiong/buck,shs96c/buck,Distrotech/buck,kageiit/buck,JoelMarcey/buck,darkforestzero/buck,rowillia/buck,clonetwin26/buck,mogers/buck,vschs007/buck,dsyang/buck,zhan-xiong/buck,ilya-klyuchnikov/buck,JoelMarcey/buck,SeleniumHQ/buck,vine/buck,marcinkwiatkowski/buck,JoelMarcey/buck,bocon13/buck,rmaz/buck,romanoid/buck,davido/buck,rmaz/buck,illicitonion/buck,marcinkwiatkowski/buck,justinmuller/buck,justinmuller/buck,rowillia/buck,rmaz/buck,bocon13/buck,grumpyjames/buck,rowillia/buck,OkBuilds/buck,rowillia/buck,janicduplessis/buck,marcinkwiatkowski/buck,mikekap/buck,janicduplessis/buck,vschs007/buck,vine/buck,raviagarwal7/buck,daedric/buck,rmaz/buck,Addepar/buck,facebook/buck,rhencke/buck,Dominator008/buck,romanoid/buck,liuyang-li/buck,grumpyjames/b
uck,brettwooldridge/buck,shs96c/buck,rhencke/buck,k21/buck,Distrotech/buck,darkforestzero/buck,clonetwin26/buck,Addepar/buck,robbertvanginkel/buck,vschs007/buck,janicduplessis/buck,sdwilsh/buck,SeleniumHQ/buck,rmaz/buck,grumpyjames/buck,dsyang/buck,kageiit/buck,zhan-xiong/buck,davido/buck,rmaz/buck,raviagarwal7/buck,liuyang-li/buck,zhan-xiong/buck,vschs007/buck,marcinkwiatkowski/buck,LegNeato/buck,davido/buck,raviagarwal7/buck,OkBuilds/buck,rowillia/buck,rhencke/buck,mogers/buck,janicduplessis/buck,nguyentruongtho/buck,mogers/buck,raviagarwal7/buck,facebook/buck,rowillia/buck,sdwilsh/buck,davido/buck,zhan-xiong/buck,sdwilsh/buck,shybovycha/buck,JoelMarcey/buck,ilya-klyuchnikov/buck,shybovycha/buck,brettwooldridge/buck,justinmuller/buck,brettwooldridge/buck,darkforestzero/buck,raviagarwal7/buck,LegNeato/buck,robbertvanginkel/buck,LegNeato/buck,shybovycha/buck,vine/buck,liuyang-li/buck,davido/buck,k21/buck,rowillia/buck,nguyentruongtho/buck,SeleniumHQ/buck,grumpyjames/buck,mikekap/buck,liuyang-li/buck,shybovycha/buck,darkforestzero/buck,Dominator008/buck,illicitonion/buck,Dominator008/buck,romanoid/buck,zhan-xiong/buck,raviagarwal7/buck,daedric/buck,illicitonion/buck,marcinkwiatkowski/buck,shs96c/buck,k21/buck,shybovycha/buck,illicitonion/buck,liuyang-li/buck,janicduplessis/buck,rowillia/buck,shs96c/buck,facebook/buck,k21/buck,robbertvanginkel/buck,davido/buck,davido/buck,illicitonion/buck,Addepar/buck,shs96c/buck,illicitonion/buck,k21/buck,illicitonion/buck,sdwilsh/buck,marcinkwiatkowski/buck,bocon13/buck,vschs007/buck,bocon13/buck,darkforestzero/buck,clonetwin26/buck,shybovycha/buck,bocon13/buck,sdwilsh/buck,dsyang/buck,Distrotech/buck,Distrotech/buck,mogers/buck,vine/buck,vschs007/buck,sdwilsh/buck,janicduplessis/buck,OkBuilds/buck,Addepar/buck,clonetwin26/buck,shs96c/buck,mogers/buck,daedric/buck,brettwooldridge/buck,justinmuller/buck,shs96c/buck,LegNeato/buck,zpao/buck,JoelMarcey/buck,rowillia/buck,Addepar/buck,darkforestzero/buck,tgummerer/buck,romanoid/buck,SeleniumHQ/buck,kageiit/buck,justinmuller/buck,facebook/buck,brettwooldridge/buck,marcinkwiatkowski/buck,marcinkwiatkowski/buck,dsyang/buck,vschs007/buck,bocon13/buck,rhencke/buck,davido/buck,raviagarwal7/buck,vschs007/buck,daedric/buck,zpao/buck,nguyentruongtho/buck,sdwilsh/buck,zpao/buck,illicitonion/buck,daedric/buck,JoelMarcey/buck,Addepar/buck,OkBuilds/buck,LegNeato/buck,daedric/buck,JoelMarcey/buck,Dominator008/buck,shybovycha/buck,vine/buck,Addepar/buck,grumpyjames/buck,darkforestzero/buck,dsyang/buck,bocon13/buck,robbertvanginkel/buck,OkBuilds/buck,robbertvanginkel/buck,ilya-klyuchnikov/buck,romanoid/buck,brettwooldridge/buck,rmaz/buck,romanoid/buck,davido/buck,rhencke/buck,ilya-klyuchnikov/buck,Distrotech/buck,liuyang-li/buck,JoelMarcey/buck,OkBuilds/buck,shs96c/buck,Distrotech/buck,SeleniumHQ/buck,clonetwin26/buck,mikekap/buck,bocon13/buck,grumpyjames/buck,tgummerer/buck,Distrotech/buck,shybovycha/buck,sdwilsh/buck,ilya-klyuchnikov/buck,Dominator008/buck,davido/buck,janicduplessis/buck,zpao/buck,zhan-xiong/buck,sdwilsh/buck,vine/buck,mogers/buck,sdwilsh/buck,SeleniumHQ/buck,clonetwin26/buck,justinmuller/buck,rmaz/buck,raviagarwal7/buck,LegNeato/buck,romanoid/buck,darkforestzero/buck,darkforestzero/buck,bocon13/buck,zhan-xiong/buck,LegNeato/buck,dsyang/buck,brettwooldridge/buck,mikekap/buck,tgummerer/buck,grumpyjames/buck,Dominator008/buck,LegNeato/buck,k21/buck,robbertvanginkel/buck,k21/buck,darkforestzero/buck,brettwooldridge/buck,vschs007/buck,vine/buck,k21/buck,SeleniumHQ/buck,ilya-klyuchnikov/buck,mikekap/buck,facebook
/buck,Addepar/buck,tgummerer/buck,grumpyjames/buck,davido/buck,davido/buck,dsyang/buck,mikekap/buck,ilya-klyuchnikov/buck,kageiit/buck,zpao/buck,mikekap/buck,shs96c/buck,rhencke/buck,LegNeato/buck,LegNeato/buck,bocon13/buck,robbertvanginkel/buck,robbertvanginkel/buck,justinmuller/buck,janicduplessis/buck,LegNeato/buck,clonetwin26/buck,rmaz/buck,zpao/buck,bocon13/buck,Distrotech/buck,JoelMarcey/buck,janicduplessis/buck,dsyang/buck,daedric/buck
|
java
|
## Code Before:
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.immutables.value.Value;
@BuckStyleImmutable
@Value.Immutable
abstract class AbstractClangCxxCompilationDatabaseEntry implements CxxCompilationDatabaseEntry {
@Value.Parameter
public abstract String getDirectory();
@Override
@Value.Parameter
public abstract String getFile();
@JsonIgnore
@Value.Parameter
public abstract ImmutableList<String> getArgs();
@Override
@Value.Derived
public String getCommand() {
return Joiner.on(' ').join(
Iterables.transform(
getArgs(),
Escaper.SHELL_ESCAPER));
}
}
## Instruction:
Add arguments to clang entries.
Summary:
http://reviews.llvm.org/rL245036 added a new attribute to clang
compilation database entries, `arguments`.
Test Plan:
CI
create a compilation database with clang format, see it has `arguments`
## Code After:
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.immutables.value.Value;
@BuckStyleImmutable
@Value.Immutable
abstract class AbstractClangCxxCompilationDatabaseEntry implements CxxCompilationDatabaseEntry {
@Value.Parameter
public abstract String getDirectory();
@Override
@Value.Parameter
public abstract String getFile();
@Value.Parameter
public abstract ImmutableList<String> getArguments();
@Override
@Value.Derived
public String getCommand() {
return Joiner.on(' ').join(
Iterables.transform(
getArguments(),
Escaper.SHELL_ESCAPER));
}
}
|
// ... existing code ...
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
// ... modified code ...
@Value.Parameter
public abstract String getFile();
@Value.Parameter
public abstract ImmutableList<String> getArguments();
@Override
@Value.Derived
...
public String getCommand() {
return Joiner.on(' ').join(
Iterables.transform(
getArguments(),
Escaper.SHELL_ESCAPER));
}
// ... rest of the code ...
|
95788f09949e83cf39588444b44eda55e13c6071
|
wluopensource/accounts/models.py
|
wluopensource/accounts/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True, verify_exists=False)
def __unicode__(self):
return self.user.username
def profile_creation_handler(sender, **kwargs):
if kwargs.get('created', False):
UserProfile.objects.get_or_create(user=kwargs['instance'])
post_save.connect(profile_creation_handler, sender=User)
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True)
def __unicode__(self):
return self.user.username
def profile_creation_handler(sender, **kwargs):
if kwargs.get('created', False):
UserProfile.objects.get_or_create(user=kwargs['instance'])
post_save.connect(profile_creation_handler, sender=User)
|
Remove verify false from user URL to match up with comment URL
|
Remove verify false from user URL to match up with comment URL
|
Python
|
bsd-3-clause
|
jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website
|
python
|
## Code Before:
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True, verify_exists=False)
def __unicode__(self):
return self.user.username
def profile_creation_handler(sender, **kwargs):
if kwargs.get('created', False):
UserProfile.objects.get_or_create(user=kwargs['instance'])
post_save.connect(profile_creation_handler, sender=User)
## Instruction:
Remove verify false from user URL to match up with comment URL
## Code After:
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True)
def __unicode__(self):
return self.user.username
def profile_creation_handler(sender, **kwargs):
if kwargs.get('created', False):
UserProfile.objects.get_or_create(user=kwargs['instance'])
post_save.connect(profile_creation_handler, sender=User)
|
...
class UserProfile(models.Model):
user = models.ForeignKey(User, blank=True, unique=True)
url = models.URLField("Website", blank=True)
def __unicode__(self):
return self.user.username
...
|