commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
4077e12bc198681cd0ec55c075f51d34588646da
|
HtmlFlow/src/main/java/htmlflow/HtmlWriter.java
|
HtmlFlow/src/main/java/htmlflow/HtmlWriter.java
|
package htmlflow;
import java.io.PrintStream;
public interface HtmlWriter<T>{
/**
* Writes into an internal PrintStream the HTML content
* of this element with initial indentation of zero.
*
* @param model An optional object model that could be bind to this element.
*/
default void write(T model) {
write(0, model);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element.
*
* @param depth The number of tabs indentation.
* @param model An optional object model that could be bind to this element.
*/
void write(int depth, T model);
/**
* Sets the current PrintStream.
* @param out
*/
HtmlWriter<T> setPrintStream(PrintStream out);
}
|
package htmlflow;
import java.io.PrintStream;
public interface HtmlWriter<T>{
/**
* Writes into an internal PrintStream the HTML content
* of this element with zero indentation and without a
* domain object.
*/
default void write() {
write(0, null);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element with initial indentation of zero.
*
* @param model An optional object model that could be bind to this element.
*/
default void write(T model) {
write(0, model);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element.
*
* @param depth The number of tabs indentation.
* @param model An optional object model that could be bind to this element.
*/
void write(int depth, T model);
/**
* Sets the current PrintStream.
* @param out
*/
HtmlWriter<T> setPrintStream(PrintStream out);
}
|
Add a parameterless write() method
|
Add a parameterless write() method
|
Java
|
mit
|
fmcarvalho/HtmlFlow,fmcarvalho/HtmlFlow
|
java
|
## Code Before:
package htmlflow;
import java.io.PrintStream;
public interface HtmlWriter<T>{
/**
* Writes into an internal PrintStream the HTML content
* of this element with initial indentation of zero.
*
* @param model An optional object model that could be bind to this element.
*/
default void write(T model) {
write(0, model);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element.
*
* @param depth The number of tabs indentation.
* @param model An optional object model that could be bind to this element.
*/
void write(int depth, T model);
/**
* Sets the current PrintStream.
* @param out
*/
HtmlWriter<T> setPrintStream(PrintStream out);
}
## Instruction:
Add a parameterless write() method
## Code After:
package htmlflow;
import java.io.PrintStream;
public interface HtmlWriter<T>{
/**
* Writes into an internal PrintStream the HTML content
* of this element with zero indentation and without a
* domain object.
*/
default void write() {
write(0, null);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element with initial indentation of zero.
*
* @param model An optional object model that could be bind to this element.
*/
default void write(T model) {
write(0, model);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element.
*
* @param depth The number of tabs indentation.
* @param model An optional object model that could be bind to this element.
*/
void write(int depth, T model);
/**
* Sets the current PrintStream.
* @param out
*/
HtmlWriter<T> setPrintStream(PrintStream out);
}
|
// ... existing code ...
import java.io.PrintStream;
public interface HtmlWriter<T>{
/**
* Writes into an internal PrintStream the HTML content
* of this element with zero indentation and without a
* domain object.
*/
default void write() {
write(0, null);
}
/**
* Writes into an internal PrintStream the HTML content
* of this element with initial indentation of zero.
// ... rest of the code ...
|
d6912d7453bd128aafb9ee8634782b26427a42a4
|
src/dashboard/src/main/templatetags/active.py
|
src/dashboard/src/main/templatetags/active.py
|
from django.template import Library
import math
register = Library()
@register.simple_tag
def active(request, pattern):
if request.path.startswith(pattern) and pattern != '/':
return 'active'
elif request.path == pattern == '/':
return 'active'
|
from django.template import Library
import math
register = Library()
@register.simple_tag
def active(request, pattern):
if request.path.startswith(pattern) and pattern != '/':
return 'active'
elif request.path == pattern == '/':
return 'active'
else:
return ''
|
Return sth in every case
|
Return sth in every case
Autoconverted from SVN (revision:1844)
|
Python
|
agpl-3.0
|
artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history
|
python
|
## Code Before:
from django.template import Library
import math
register = Library()
@register.simple_tag
def active(request, pattern):
if request.path.startswith(pattern) and pattern != '/':
return 'active'
elif request.path == pattern == '/':
return 'active'
## Instruction:
Return sth in every case
Autoconverted from SVN (revision:1844)
## Code After:
from django.template import Library
import math
register = Library()
@register.simple_tag
def active(request, pattern):
if request.path.startswith(pattern) and pattern != '/':
return 'active'
elif request.path == pattern == '/':
return 'active'
else:
return ''
|
# ... existing code ...
return 'active'
elif request.path == pattern == '/':
return 'active'
else:
return ''
# ... rest of the code ...
|
8a4295876a4e1059f46f8fadaa1562062bfe877e
|
tests/test_edge_cases.py
|
tests/test_edge_cases.py
|
from __future__ import with_statement
import unittest
from flask import Flask
import flask_featureflags as feature_flags
class TestOutsideRequestContext(unittest.TestCase):
def test_checking_is_active_outside_request_context_returns_false(self):
self.assertFalse(feature_flags.is_active("BOGUS_FEATURE_FLAG"))
def test_default_handler_returns_false_outside_request_context(self):
self.assertFalse(feature_flags.AppConfigFlagHandler("BOGUS_FEATURE_FLAG"))
class TestBadlyConfiguredApplication(unittest.TestCase):
def test_checking_is_active_on_an_app_that_was_never_set_up_raises_assertion(self):
# This simulates somebody calling is_active on a Flask app that was never
# set up with this extension. Since this is somebody likely trying to install it,
# make sure they get a nice, helpful error message
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(AssertionError, feature_flags.is_active, "BOGUS_FEATURE_FLAG")
def test_running_default_handler_on_app_that_was_never_set_up_returns_false(self):
# This case should only happen if somebody's being especially creative, but
# I want to make sure it's well-behaved anyways.
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertFalse(feature_flags.AppConfigFlagHandler("BOGUS_FEATURE_FLAG"))
|
from __future__ import with_statement
import unittest
from flask import Flask
import flask_featureflags as feature_flags
class TestOutsideRequestContext(unittest.TestCase):
def test_checking_is_active_outside_request_context_returns_false(self):
self.assertFalse(feature_flags.is_active("BOGUS_FEATURE_FLAG"))
def test_default_handler_returns_false_outside_request_context(self):
self.assertFalse(feature_flags.AppConfigFlagHandler("BOGUS_FEATURE_FLAG"))
class TestBadlyConfiguredApplication(unittest.TestCase):
def test_checking_is_active_on_an_app_that_was_never_set_up_raises_assertion(self):
# This simulates somebody calling is_active on a Flask app that was never
# set up with this extension. Since this is somebody likely trying to install it,
# make sure they get a nice, helpful error message
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(AssertionError, feature_flags.is_active, "BOGUS_FEATURE_FLAG")
def test_running_default_handler_on_app_that_was_never_set_up_returns_false(self):
# This case should only happen if somebody's being especially creative, but
# I want to make sure it's well-behaved anyways.
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(feature_flags.NoFeatureFlagFound,
feature_flags.AppConfigFlagHandler, "BOGUS_FEATURE_FLAG")
|
Fix test to support NoFeatureFlagFound.
|
Fix test to support NoFeatureFlagFound.
|
Python
|
apache-2.0
|
iromli/Flask-FeatureFlags,trustrachel/Flask-FeatureFlags,jskulski/Flask-FeatureFlags
|
python
|
## Code Before:
from __future__ import with_statement
import unittest
from flask import Flask
import flask_featureflags as feature_flags
class TestOutsideRequestContext(unittest.TestCase):
def test_checking_is_active_outside_request_context_returns_false(self):
self.assertFalse(feature_flags.is_active("BOGUS_FEATURE_FLAG"))
def test_default_handler_returns_false_outside_request_context(self):
self.assertFalse(feature_flags.AppConfigFlagHandler("BOGUS_FEATURE_FLAG"))
class TestBadlyConfiguredApplication(unittest.TestCase):
def test_checking_is_active_on_an_app_that_was_never_set_up_raises_assertion(self):
# This simulates somebody calling is_active on a Flask app that was never
# set up with this extension. Since this is somebody likely trying to install it,
# make sure they get a nice, helpful error message
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(AssertionError, feature_flags.is_active, "BOGUS_FEATURE_FLAG")
def test_running_default_handler_on_app_that_was_never_set_up_returns_false(self):
# This case should only happen if somebody's being especially creative, but
# I want to make sure it's well-behaved anyways.
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertFalse(feature_flags.AppConfigFlagHandler("BOGUS_FEATURE_FLAG"))
## Instruction:
Fix test to support NoFeatureFlagFound.
## Code After:
from __future__ import with_statement
import unittest
from flask import Flask
import flask_featureflags as feature_flags
class TestOutsideRequestContext(unittest.TestCase):
def test_checking_is_active_outside_request_context_returns_false(self):
self.assertFalse(feature_flags.is_active("BOGUS_FEATURE_FLAG"))
def test_default_handler_returns_false_outside_request_context(self):
self.assertFalse(feature_flags.AppConfigFlagHandler("BOGUS_FEATURE_FLAG"))
class TestBadlyConfiguredApplication(unittest.TestCase):
def test_checking_is_active_on_an_app_that_was_never_set_up_raises_assertion(self):
# This simulates somebody calling is_active on a Flask app that was never
# set up with this extension. Since this is somebody likely trying to install it,
# make sure they get a nice, helpful error message
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(AssertionError, feature_flags.is_active, "BOGUS_FEATURE_FLAG")
def test_running_default_handler_on_app_that_was_never_set_up_returns_false(self):
# This case should only happen if somebody's being especially creative, but
# I want to make sure it's well-behaved anyways.
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(feature_flags.NoFeatureFlagFound,
feature_flags.AppConfigFlagHandler, "BOGUS_FEATURE_FLAG")
|
...
test_app = Flask(__name__)
with test_app.test_request_context("/"):
self.assertRaises(feature_flags.NoFeatureFlagFound,
feature_flags.AppConfigFlagHandler, "BOGUS_FEATURE_FLAG")
...
|
f3dd0c94c0c7be2a5ebc2c0df59dd9fb15969eb9
|
ghpythonremote/_configure_ironpython_installation.py
|
ghpythonremote/_configure_ironpython_installation.py
|
import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
Correct --no-binary option, incorrect formatting in pypi doc
|
Correct --no-binary option, incorrect formatting in pypi doc
|
Python
|
mit
|
Digital-Structures/ghpythonremote,pilcru/ghpythonremote
|
python
|
## Code Before:
import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
## Instruction:
Correct --no-binary option, incorrect formatting in pypi doc
## Code After:
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
// ... existing code ...
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
// ... modified code ...
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
// ... rest of the code ...
|
882a0864f4342f90ceb3c14312c66cba39a4a627
|
src/main/java/ch/hood/App.java
|
src/main/java/ch/hood/App.java
|
package ch.hood;
import static ch.hood.jooq.schema1.Tables.A;
import static ch.hood.jooq.schema2.Tables.B;
import static org.jooq.impl.DSL.select;
public class App {
public static void main(String[] args) {
System.out.println(
select(A.ID, A.FLAG)
.from(A)
.join(B).on(B.NAME.eq(A.NAME))
.toString());
}
}
|
package ch.hood;
import static ch.hood.jooq.schema1.Schema1.SCHEMA1;
import static ch.hood.jooq.schema1.Tables.A;
import static ch.hood.jooq.schema2.Schema2.SCHEMA2;
import static ch.hood.jooq.schema2.Tables.B;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static org.jooq.impl.DSL.select;
import ch.hood.jooq.schema1.Schema1;
import ch.hood.jooq.schema2.Schema2;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.conf.MappedSchema;
import org.jooq.conf.RenderMapping;
import org.jooq.conf.RenderNameStyle;
import org.jooq.conf.Settings;
import org.jooq.impl.DSL;
import org.jooq.impl.DefaultConfiguration;
/**
* The main methods expects either no arguments or 2 arguments. When run without arguments, we assume a HQSLDB;
* when run with 2 arguments, we expect the names of the schema1 and the schema2 for a Postgresql (or Oracle) instance.
*/
public class App {
public static void main(String[] args) {
SQLDialect sqlDialect = args.length == 0 ? SQLDialect.HSQLDB : SQLDialect.POSTGRES; // SQLDialect.ORACLE
Settings settings = new Settings()
.withRenderFormatted(true)
.withRenderSchema(TRUE)
.withRenderNameStyle(RenderNameStyle.UPPER);
if (sqlDialect == SQLDialect.POSTGRES) {
String schema1Name = args[0];
String schema2Name = args[1];
settings.withRenderMapping(new RenderMapping()
.withSchemata(
new MappedSchema().withInput(SCHEMA1.getName()).withOutput(schema1Name),
new MappedSchema().withInput(SCHEMA2.getName()).withOutput(schema2Name)));
}
Configuration config = new DefaultConfiguration()
.set(sqlDialect)
.set(settings);
Configuration configuration = config;
DSLContext dsl = DSL.using(configuration);
System.out.println(
dsl.select(A.ID, A.FLAG)
.from(A)
.join(B).on(B.NAME.eq(A.NAME))
.toString());
}
}
|
Add Code Example with Schema Mapping.
|
Add Code Example with Schema Mapping.
|
Java
|
apache-2.0
|
stanislas/jooq-with-liquibase
|
java
|
## Code Before:
package ch.hood;
import static ch.hood.jooq.schema1.Tables.A;
import static ch.hood.jooq.schema2.Tables.B;
import static org.jooq.impl.DSL.select;
public class App {
public static void main(String[] args) {
System.out.println(
select(A.ID, A.FLAG)
.from(A)
.join(B).on(B.NAME.eq(A.NAME))
.toString());
}
}
## Instruction:
Add Code Example with Schema Mapping.
## Code After:
package ch.hood;
import static ch.hood.jooq.schema1.Schema1.SCHEMA1;
import static ch.hood.jooq.schema1.Tables.A;
import static ch.hood.jooq.schema2.Schema2.SCHEMA2;
import static ch.hood.jooq.schema2.Tables.B;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static org.jooq.impl.DSL.select;
import ch.hood.jooq.schema1.Schema1;
import ch.hood.jooq.schema2.Schema2;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.conf.MappedSchema;
import org.jooq.conf.RenderMapping;
import org.jooq.conf.RenderNameStyle;
import org.jooq.conf.Settings;
import org.jooq.impl.DSL;
import org.jooq.impl.DefaultConfiguration;
/**
* The main methods expects either no arguments or 2 arguments. When run without arguments, we assume a HQSLDB;
* when run with 2 arguments, we expect the names of the schema1 and the schema2 for a Postgresql (or Oracle) instance.
*/
public class App {
public static void main(String[] args) {
SQLDialect sqlDialect = args.length == 0 ? SQLDialect.HSQLDB : SQLDialect.POSTGRES; // SQLDialect.ORACLE
Settings settings = new Settings()
.withRenderFormatted(true)
.withRenderSchema(TRUE)
.withRenderNameStyle(RenderNameStyle.UPPER);
if (sqlDialect == SQLDialect.POSTGRES) {
String schema1Name = args[0];
String schema2Name = args[1];
settings.withRenderMapping(new RenderMapping()
.withSchemata(
new MappedSchema().withInput(SCHEMA1.getName()).withOutput(schema1Name),
new MappedSchema().withInput(SCHEMA2.getName()).withOutput(schema2Name)));
}
Configuration config = new DefaultConfiguration()
.set(sqlDialect)
.set(settings);
Configuration configuration = config;
DSLContext dsl = DSL.using(configuration);
System.out.println(
dsl.select(A.ID, A.FLAG)
.from(A)
.join(B).on(B.NAME.eq(A.NAME))
.toString());
}
}
|
...
package ch.hood;
import static ch.hood.jooq.schema1.Schema1.SCHEMA1;
import static ch.hood.jooq.schema1.Tables.A;
import static ch.hood.jooq.schema2.Schema2.SCHEMA2;
import static ch.hood.jooq.schema2.Tables.B;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static org.jooq.impl.DSL.select;
import ch.hood.jooq.schema1.Schema1;
import ch.hood.jooq.schema2.Schema2;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.conf.MappedSchema;
import org.jooq.conf.RenderMapping;
import org.jooq.conf.RenderNameStyle;
import org.jooq.conf.Settings;
import org.jooq.impl.DSL;
import org.jooq.impl.DefaultConfiguration;
/**
* The main methods expects either no arguments or 2 arguments. When run without arguments, we assume a HQSLDB;
* when run with 2 arguments, we expect the names of the schema1 and the schema2 for a Postgresql (or Oracle) instance.
*/
public class App {
public static void main(String[] args) {
SQLDialect sqlDialect = args.length == 0 ? SQLDialect.HSQLDB : SQLDialect.POSTGRES; // SQLDialect.ORACLE
Settings settings = new Settings()
.withRenderFormatted(true)
.withRenderSchema(TRUE)
.withRenderNameStyle(RenderNameStyle.UPPER);
if (sqlDialect == SQLDialect.POSTGRES) {
String schema1Name = args[0];
String schema2Name = args[1];
settings.withRenderMapping(new RenderMapping()
.withSchemata(
new MappedSchema().withInput(SCHEMA1.getName()).withOutput(schema1Name),
new MappedSchema().withInput(SCHEMA2.getName()).withOutput(schema2Name)));
}
Configuration config = new DefaultConfiguration()
.set(sqlDialect)
.set(settings);
Configuration configuration = config;
DSLContext dsl = DSL.using(configuration);
System.out.println(
dsl.select(A.ID, A.FLAG)
.from(A)
.join(B).on(B.NAME.eq(A.NAME))
.toString());
...
|
6611ff0042232589522e2b0f8bfe0f30b67e6d5e
|
IRCCloud/config.h
|
IRCCloud/config.h
|
//
// config.h
// IRCCloud
//
// Created by Sam Steele on 7/13/13.
// Copyright (c) 2013 IRCCloud, Ltd. All rights reserved.
//
#ifndef IRCCloud_config_h
#define IRCCloud_config_h
#define HOCKEYAPP_TOKEN nil
#define CRASHLYTICS_TOKEN nil
#define CRASHLYTICS_SECRET nil
#endif
|
//
// config.h
// IRCCloud
//
// Created by Sam Steele on 7/13/13.
// Copyright (c) 2013 IRCCloud, Ltd. All rights reserved.
//
#ifndef IRCCloud_config_h
#define IRCCloud_config_h
#endif
|
Remove nil tokens from conf
|
Remove nil tokens from conf
|
C
|
apache-2.0
|
irccloud/ios,irccloud/ios,irccloud/ios,iOSTestApps/irccloud-ios,DreamHill/ios,irccloud/ios,irccloud/ios
|
c
|
## Code Before:
//
// config.h
// IRCCloud
//
// Created by Sam Steele on 7/13/13.
// Copyright (c) 2013 IRCCloud, Ltd. All rights reserved.
//
#ifndef IRCCloud_config_h
#define IRCCloud_config_h
#define HOCKEYAPP_TOKEN nil
#define CRASHLYTICS_TOKEN nil
#define CRASHLYTICS_SECRET nil
#endif
## Instruction:
Remove nil tokens from conf
## Code After:
//
// config.h
// IRCCloud
//
// Created by Sam Steele on 7/13/13.
// Copyright (c) 2013 IRCCloud, Ltd. All rights reserved.
//
#ifndef IRCCloud_config_h
#define IRCCloud_config_h
#endif
|
// ... existing code ...
#ifndef IRCCloud_config_h
#define IRCCloud_config_h
#endif
// ... rest of the code ...
|
86fc1b91aeb9dc17b5776ef372050c707c64fc30
|
setup.py
|
setup.py
|
from distutils.core import setup
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Natural Language :: English',
]
with open("README.rst") as f:
LONG_DESCRIPTION = ''.join(f.readlines())
setup(
name="properties",
version="0.2.3",
packages=find_packages(exclude=('tests',)),
install_requires=[
'future',
'numpy>=1.7',
'six',
'vectormath>=0.1.0',
],
author="3point Science",
author_email="[email protected]",
description="properties",
long_description=LONG_DESCRIPTION,
keywords="property",
url="http://steno3d.com/",
download_url="http://github.com/3ptscience/properties",
classifiers=CLASSIFIERS,
platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
use_2to3=False,
)
|
from distutils.core import setup
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Natural Language :: English',
]
with open("README.rst") as f:
LONG_DESCRIPTION = ''.join(f.readlines())
setup(
name="properties",
version="0.2.3",
packages=find_packages(exclude=('tests',)),
install_requires=[
'numpy>=1.7',
'six',
'vectormath>=0.1.1',
],
author="3point Science",
author_email="[email protected]",
description="properties",
long_description=LONG_DESCRIPTION,
keywords="property",
url="http://steno3d.com/",
download_url="http://github.com/3ptscience/properties",
classifiers=CLASSIFIERS,
platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
use_2to3=False,
)
|
Remove future, bump vectormath dependencies
|
Remove future, bump vectormath dependencies
|
Python
|
mit
|
3ptscience/properties,aranzgeo/properties
|
python
|
## Code Before:
from distutils.core import setup
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Natural Language :: English',
]
with open("README.rst") as f:
LONG_DESCRIPTION = ''.join(f.readlines())
setup(
name="properties",
version="0.2.3",
packages=find_packages(exclude=('tests',)),
install_requires=[
'future',
'numpy>=1.7',
'six',
'vectormath>=0.1.0',
],
author="3point Science",
author_email="[email protected]",
description="properties",
long_description=LONG_DESCRIPTION,
keywords="property",
url="http://steno3d.com/",
download_url="http://github.com/3ptscience/properties",
classifiers=CLASSIFIERS,
platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
use_2to3=False,
)
## Instruction:
Remove future, bump vectormath dependencies
## Code After:
from distutils.core import setup
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Natural Language :: English',
]
with open("README.rst") as f:
LONG_DESCRIPTION = ''.join(f.readlines())
setup(
name="properties",
version="0.2.3",
packages=find_packages(exclude=('tests',)),
install_requires=[
'numpy>=1.7',
'six',
'vectormath>=0.1.1',
],
author="3point Science",
author_email="[email protected]",
description="properties",
long_description=LONG_DESCRIPTION,
keywords="property",
url="http://steno3d.com/",
download_url="http://github.com/3ptscience/properties",
classifiers=CLASSIFIERS,
platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
use_2to3=False,
)
|
...
version="0.2.3",
packages=find_packages(exclude=('tests',)),
install_requires=[
'numpy>=1.7',
'six',
'vectormath>=0.1.1',
],
author="3point Science",
author_email="[email protected]",
...
|
3749acbad597974ef2507b2e7e27240937658c0b
|
nilmtk/plots.py
|
nilmtk/plots.py
|
from __future__ import print_function, division
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
_to_ordinalf_np_vectorized = np.vectorize(mdates._to_ordinalf)
def plot_series(series, ax=None, label=None, date_format='%d/%m/%y %H:%M:%S', **kwargs):
"""Plot function for series which is about 5 times faster than
pd.Series.plot().
Parameters
----------
ax : matplotlib Axes, optional
If not provided then will generate our own axes.
label : str, optional
The label for the plotted line. The
caller is responsible for enabling the legend.
date_format : str, optional, default='%d/%m/%y %H:%M:%S'
"""
if ax is None:
ax = plt.gca()
ax.xaxis.axis_date(tz=series.index.tzinfo)
ax.xaxis.set_major_formatter(mdates.DateFormatter(date_format))
x = _to_ordinalf_np_vectorized(series.index.to_pydatetime())
ax.plot(x, series, label=label, **kwargs)
ax.set_ylabel('watts')
return ax
|
from __future__ import print_function, division
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
_to_ordinalf_np_vectorized = np.vectorize(mdates._to_ordinalf)
def plot_series(series, ax=None, label=None, date_format='%d/%m/%y %H:%M:%S', **kwargs):
"""Plot function for series which is about 5 times faster than
pd.Series.plot().
Parameters
----------
ax : matplotlib Axes, optional
If not provided then will generate our own axes.
label : str, optional
The label for the plotted line. The
caller is responsible for enabling the legend.
date_format : str, optional, default='%d/%m/%y %H:%M:%S'
"""
if ax is None:
fig, ax = plt.subplots(1)
x = _to_ordinalf_np_vectorized(series.index.to_pydatetime())
ax.plot(x, series, label=label, **kwargs)
ax.xaxis.set_major_formatter(mdates.DateFormatter(date_format,
tz=series.index.tzinfo))
ax.set_ylabel('watts')
fig.autofmt_xdate()
plt.draw()
return ax
|
Fix bug where timezone was not used for xaxis.
|
Fix bug where timezone was not used for xaxis.
|
Python
|
apache-2.0
|
jaduimstra/nilmtk,josemao/nilmtk,pauldeng/nilmtk,AlexRobson/nilmtk,mmottahedi/nilmtk,nilmtk/nilmtk,nilmtk/nilmtk,HarllanAndrye/nilmtk
|
python
|
## Code Before:
from __future__ import print_function, division
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
_to_ordinalf_np_vectorized = np.vectorize(mdates._to_ordinalf)
def plot_series(series, ax=None, label=None, date_format='%d/%m/%y %H:%M:%S', **kwargs):
"""Plot function for series which is about 5 times faster than
pd.Series.plot().
Parameters
----------
ax : matplotlib Axes, optional
If not provided then will generate our own axes.
label : str, optional
The label for the plotted line. The
caller is responsible for enabling the legend.
date_format : str, optional, default='%d/%m/%y %H:%M:%S'
"""
if ax is None:
ax = plt.gca()
ax.xaxis.axis_date(tz=series.index.tzinfo)
ax.xaxis.set_major_formatter(mdates.DateFormatter(date_format))
x = _to_ordinalf_np_vectorized(series.index.to_pydatetime())
ax.plot(x, series, label=label, **kwargs)
ax.set_ylabel('watts')
return ax
## Instruction:
Fix bug where timezone was not used for xaxis.
## Code After:
from __future__ import print_function, division
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
_to_ordinalf_np_vectorized = np.vectorize(mdates._to_ordinalf)
def plot_series(series, ax=None, label=None, date_format='%d/%m/%y %H:%M:%S', **kwargs):
"""Plot function for series which is about 5 times faster than
pd.Series.plot().
Parameters
----------
ax : matplotlib Axes, optional
If not provided then will generate our own axes.
label : str, optional
The label for the plotted line. The
caller is responsible for enabling the legend.
date_format : str, optional, default='%d/%m/%y %H:%M:%S'
"""
if ax is None:
fig, ax = plt.subplots(1)
x = _to_ordinalf_np_vectorized(series.index.to_pydatetime())
ax.plot(x, series, label=label, **kwargs)
ax.xaxis.set_major_formatter(mdates.DateFormatter(date_format,
tz=series.index.tzinfo))
ax.set_ylabel('watts')
fig.autofmt_xdate()
plt.draw()
return ax
|
# ... existing code ...
date_format : str, optional, default='%d/%m/%y %H:%M:%S'
"""
if ax is None:
fig, ax = plt.subplots(1)
x = _to_ordinalf_np_vectorized(series.index.to_pydatetime())
ax.plot(x, series, label=label, **kwargs)
ax.xaxis.set_major_formatter(mdates.DateFormatter(date_format,
tz=series.index.tzinfo))
ax.set_ylabel('watts')
fig.autofmt_xdate()
plt.draw()
return ax
# ... rest of the code ...
|
14035edcca3b6d3046cf5af06030dae61746b87b
|
byte-buddy-dep/src/test/java/net/bytebuddy/description/type/PackageDescriptionForLoadedPackageTest.java
|
byte-buddy-dep/src/test/java/net/bytebuddy/description/type/PackageDescriptionForLoadedPackageTest.java
|
package net.bytebuddy.description.type;
public class PackageDescriptionForLoadedPackageTest extends AbstractPackageDescriptionTest {
protected PackageDescription describe(Class<?> type) {
return new PackageDescription.ForLoadedPackage(type.getPackage());
}
}
|
package net.bytebuddy.description.type;
public class PackageDescriptionForLoadedPackageTest extends AbstractPackageDescriptionTest {
protected PackageDescription describe(Class<?> type) {
return TypeDescription.ForLoadedType.of(type).getPackage();
}
}
|
Fix test for loaded package description.
|
Fix test for loaded package description.
|
Java
|
apache-2.0
|
raphw/byte-buddy,raphw/byte-buddy,raphw/byte-buddy
|
java
|
## Code Before:
package net.bytebuddy.description.type;
public class PackageDescriptionForLoadedPackageTest extends AbstractPackageDescriptionTest {
protected PackageDescription describe(Class<?> type) {
return new PackageDescription.ForLoadedPackage(type.getPackage());
}
}
## Instruction:
Fix test for loaded package description.
## Code After:
package net.bytebuddy.description.type;
public class PackageDescriptionForLoadedPackageTest extends AbstractPackageDescriptionTest {
protected PackageDescription describe(Class<?> type) {
return TypeDescription.ForLoadedType.of(type).getPackage();
}
}
|
# ... existing code ...
public class PackageDescriptionForLoadedPackageTest extends AbstractPackageDescriptionTest {
protected PackageDescription describe(Class<?> type) {
return TypeDescription.ForLoadedType.of(type).getPackage();
}
}
# ... rest of the code ...
|
4b54a8a038d5f9f2ead224b030f87f393d57d40b
|
tests/__init__.py
|
tests/__init__.py
|
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
Fix test_copyreg when numpy is installed (GH-20935)
|
bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
python
|
## Code Before:
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
## Instruction:
bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module.
## Code After:
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
// ... existing code ...
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
// ... modified code ...
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
// ... rest of the code ...
|
6a13df5cd32ccb76181feb93dd3a160cc7f728b7
|
src/test/java/tars/testutil/TaskBuilder.java
|
src/test/java/tars/testutil/TaskBuilder.java
|
package tars.testutil;
import tars.commons.exceptions.IllegalValueException;
import tars.model.task.*;
import tars.model.tag.Tag;
/**
*
*/
public class TaskBuilder {
private TestTask task;
public TaskBuilder() {
this.task = new TestTask();
}
public TaskBuilder withName(String name) throws IllegalValueException {
this.task.setName(new Name(name));
return this;
}
public TaskBuilder withTags(String ... tags) throws IllegalValueException {
for (String tag: tags) {
task.getTags().add(new Tag(tag));
}
return this;
}
public TaskBuilder withAddress(String address) throws IllegalValueException {
this.task.setAddress(new Address(address));
return this;
}
public TaskBuilder withPhone(String phone) throws IllegalValueException {
this.task.setPhone(new Phone(phone));
return this;
}
public TaskBuilder withEmail(String email) throws IllegalValueException {
this.task.setEmail(new Email(email));
return this;
}
public TestTask build() {
return this.task;
}
}
|
package tars.testutil;
import tars.commons.exceptions.IllegalValueException;
import tars.model.task.*;
import tars.model.tag.Tag;
/**
*
*/
public class TaskBuilder {
private TestTask task;
public TaskBuilder() {
this.task = new TestTask();
}
public TaskBuilder withName(String name) throws IllegalValueException {
this.task.setName(new Name(name));
return this;
}
public TaskBuilder withTags(String ... tags) throws IllegalValueException {
for (String tag: tags) {
task.getTags().add(new Tag(tag));
}
return this;
}
public TaskBuilder withDateTime(String dateTime) throws IllegalValueException {
this.task.setDateTime(new DateTime(dateTime, ""));
return this;
}
public TaskBuilder withPriority(String priority) throws IllegalValueException {
this.task.setPriority(new Priority(priority));
return this;
}
public TestTask build() {
return this.task;
}
}
|
Add methods withDateTime(String) and withPriority(String)
|
Add methods withDateTime(String) and withPriority(String)
|
Java
|
mit
|
CS2103AUG2016-F10-C1/main,CS2103AUG2016-F10-C1/main,CS2103AUG2016-F10-C1/main
|
java
|
## Code Before:
package tars.testutil;
import tars.commons.exceptions.IllegalValueException;
import tars.model.task.*;
import tars.model.tag.Tag;
/**
*
*/
public class TaskBuilder {
private TestTask task;
public TaskBuilder() {
this.task = new TestTask();
}
public TaskBuilder withName(String name) throws IllegalValueException {
this.task.setName(new Name(name));
return this;
}
public TaskBuilder withTags(String ... tags) throws IllegalValueException {
for (String tag: tags) {
task.getTags().add(new Tag(tag));
}
return this;
}
public TaskBuilder withAddress(String address) throws IllegalValueException {
this.task.setAddress(new Address(address));
return this;
}
public TaskBuilder withPhone(String phone) throws IllegalValueException {
this.task.setPhone(new Phone(phone));
return this;
}
public TaskBuilder withEmail(String email) throws IllegalValueException {
this.task.setEmail(new Email(email));
return this;
}
public TestTask build() {
return this.task;
}
}
## Instruction:
Add methods withDateTime(String) and withPriority(String)
## Code After:
package tars.testutil;
import tars.commons.exceptions.IllegalValueException;
import tars.model.task.*;
import tars.model.tag.Tag;
/**
*
*/
public class TaskBuilder {
private TestTask task;
public TaskBuilder() {
this.task = new TestTask();
}
public TaskBuilder withName(String name) throws IllegalValueException {
this.task.setName(new Name(name));
return this;
}
public TaskBuilder withTags(String ... tags) throws IllegalValueException {
for (String tag: tags) {
task.getTags().add(new Tag(tag));
}
return this;
}
public TaskBuilder withDateTime(String dateTime) throws IllegalValueException {
this.task.setDateTime(new DateTime(dateTime, ""));
return this;
}
public TaskBuilder withPriority(String priority) throws IllegalValueException {
this.task.setPriority(new Priority(priority));
return this;
}
public TestTask build() {
return this.task;
}
}
|
# ... existing code ...
return this;
}
public TaskBuilder withDateTime(String dateTime) throws IllegalValueException {
this.task.setDateTime(new DateTime(dateTime, ""));
return this;
}
public TaskBuilder withPriority(String priority) throws IllegalValueException {
this.task.setPriority(new Priority(priority));
return this;
}
# ... rest of the code ...
|
7b2de0a0a54ade4977015ae44cc57dc9eac1ed5b
|
client/src/main/java/org/realityforge/replicant/client/EntityLocator.java
|
client/src/main/java/org/realityforge/replicant/client/EntityLocator.java
|
package org.realityforge.replicant.client;
import arez.component.NoSuchEntityException;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* The service interface for looking up entities by type+id.
* This is used from within the entities during linking phase.
*/
public interface EntityLocator
{
@Nonnull
default <T> T getByID( @Nonnull final Class<T> type, @Nonnull final Object id )
{
final T entity = findByID( type, id );
if ( null == entity )
{
throw new NoSuchEntityException( id );
}
return entity;
}
/**
* Lookup an entity of specified type with specified id, returning null if not present.
*
* @param type the type of the entity.
* @param id the id of the entity.
* @param <T> the entity type.
* @return the entity or null if no such entity.
*/
@Nullable
<T> T findByID( @Nonnull Class<T> type, @Nonnull Object id );
}
|
package org.realityforge.replicant.client;
import arez.component.NoSuchEntityException;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* The service interface for looking up entities by type+id.
* This is used from within the entities during linking phase.
*/
public interface EntityLocator
{
@Nonnull
default <T> T getByID( @Nonnull final Class<T> type, @Nonnull final Object id )
{
final T entity = findById( type, id );
if ( null == entity )
{
throw new NoSuchEntityException( id );
}
return entity;
}
/**
* Lookup an entity of specified type with specified id, returning null if not present.
*
* @param type the type of the entity.
* @param id the id of the entity.
* @param <T> the entity type.
* @return the entity or null if no such entity.
*/
@Nullable
<T> T findById( @Nonnull Class<T> type, @Nonnull Object id );
}
|
Rename method findByID to findById
|
Rename method findByID to findById
|
Java
|
apache-2.0
|
realityforge/replicant,realityforge/replicant
|
java
|
## Code Before:
package org.realityforge.replicant.client;
import arez.component.NoSuchEntityException;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* The service interface for looking up entities by type+id.
* This is used from within the entities during linking phase.
*/
public interface EntityLocator
{
@Nonnull
default <T> T getByID( @Nonnull final Class<T> type, @Nonnull final Object id )
{
final T entity = findByID( type, id );
if ( null == entity )
{
throw new NoSuchEntityException( id );
}
return entity;
}
/**
* Lookup an entity of specified type with specified id, returning null if not present.
*
* @param type the type of the entity.
* @param id the id of the entity.
* @param <T> the entity type.
* @return the entity or null if no such entity.
*/
@Nullable
<T> T findByID( @Nonnull Class<T> type, @Nonnull Object id );
}
## Instruction:
Rename method findByID to findById
## Code After:
package org.realityforge.replicant.client;
import arez.component.NoSuchEntityException;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* The service interface for looking up entities by type+id.
* This is used from within the entities during linking phase.
*/
public interface EntityLocator
{
@Nonnull
default <T> T getByID( @Nonnull final Class<T> type, @Nonnull final Object id )
{
final T entity = findById( type, id );
if ( null == entity )
{
throw new NoSuchEntityException( id );
}
return entity;
}
/**
* Lookup an entity of specified type with specified id, returning null if not present.
*
* @param type the type of the entity.
* @param id the id of the entity.
* @param <T> the entity type.
* @return the entity or null if no such entity.
*/
@Nullable
<T> T findById( @Nonnull Class<T> type, @Nonnull Object id );
}
|
...
package org.realityforge.replicant.client;
import arez.component.NoSuchEntityException;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
...
@Nonnull
default <T> T getByID( @Nonnull final Class<T> type, @Nonnull final Object id )
{
final T entity = findById( type, id );
if ( null == entity )
{
throw new NoSuchEntityException( id );
...
* @return the entity or null if no such entity.
*/
@Nullable
<T> T findById( @Nonnull Class<T> type, @Nonnull Object id );
}
...
|
1e47f79647baffd62d2a434710fe98b3c2247f28
|
tests/pgcomplex_app/models.py
|
tests/pgcomplex_app/models.py
|
from django.db import models
from django_orm.postgresql.fields.arrays import ArrayField
from django_orm.postgresql.fields.interval import IntervalField
from django_orm.postgresql.fields.bytea import ByteaField
from django_orm.postgresql.manager import PgManager
class IntModel(models.Model):
lista = ArrayField(dbtype='int')
objects = PgManager()
class TextModel(models.Model):
lista = ArrayField(dbtype='text')
objects = PgManager()
class DoubleModel(models.Model):
lista = ArrayField(dbtype='double precision')
objects = PgManager()
class VarcharModel(models.Model):
lista = ArrayField(dbtype='varchar(40)')
objects = PgManager()
class IntervalModel(models.Model):
iv = IntervalField()
objects = PgManager()
class ByteaModel(models.Model):
bb = ByteaField()
objects = PgManager()
from django_orm.postgresql.geometric.fields import PointField, CircleField
from django_orm.postgresql.geometric.fields import LsegField, BoxField
from django_orm.postgresql.geometric.fields import PathField, PolygonField
class GeomModel(models.Model):
pt = PointField()
pl = PolygonField()
ln = LsegField()
bx = BoxField()
cr = CircleField()
ph = PathField()
objects = PgManager()
|
from django.db import models
from django_orm.postgresql.fields.arrays import ArrayField
from django_orm.postgresql.fields.interval import IntervalField
from django_orm.postgresql.fields.bytea import ByteaField
from django_orm.manager import Manager
class IntModel(models.Model):
lista = ArrayField(dbtype='int')
objects = Manager()
class TextModel(models.Model):
lista = ArrayField(dbtype='text')
objects = Manager()
class DoubleModel(models.Model):
lista = ArrayField(dbtype='double precision')
objects = Manager()
class VarcharModel(models.Model):
lista = ArrayField(dbtype='varchar(40)')
objects = Manager()
class IntervalModel(models.Model):
iv = IntervalField()
objects = Manager()
class ByteaModel(models.Model):
bb = ByteaField()
objects = Manager()
from django_orm.postgresql.geometric.fields import PointField, CircleField
from django_orm.postgresql.geometric.fields import LsegField, BoxField
from django_orm.postgresql.geometric.fields import PathField, PolygonField
class GeomModel(models.Model):
pt = PointField()
pl = PolygonField()
ln = LsegField()
bx = BoxField()
cr = CircleField()
ph = PathField()
objects = Manager()
|
Fix tests with last changes on api.
|
Fix tests with last changes on api.
|
Python
|
bsd-3-clause
|
EnTeQuAk/django-orm,EnTeQuAk/django-orm
|
python
|
## Code Before:
from django.db import models
from django_orm.postgresql.fields.arrays import ArrayField
from django_orm.postgresql.fields.interval import IntervalField
from django_orm.postgresql.fields.bytea import ByteaField
from django_orm.postgresql.manager import PgManager
class IntModel(models.Model):
lista = ArrayField(dbtype='int')
objects = PgManager()
class TextModel(models.Model):
lista = ArrayField(dbtype='text')
objects = PgManager()
class DoubleModel(models.Model):
lista = ArrayField(dbtype='double precision')
objects = PgManager()
class VarcharModel(models.Model):
lista = ArrayField(dbtype='varchar(40)')
objects = PgManager()
class IntervalModel(models.Model):
iv = IntervalField()
objects = PgManager()
class ByteaModel(models.Model):
bb = ByteaField()
objects = PgManager()
from django_orm.postgresql.geometric.fields import PointField, CircleField
from django_orm.postgresql.geometric.fields import LsegField, BoxField
from django_orm.postgresql.geometric.fields import PathField, PolygonField
class GeomModel(models.Model):
pt = PointField()
pl = PolygonField()
ln = LsegField()
bx = BoxField()
cr = CircleField()
ph = PathField()
objects = PgManager()
## Instruction:
Fix tests with last changes on api.
## Code After:
from django.db import models
from django_orm.postgresql.fields.arrays import ArrayField
from django_orm.postgresql.fields.interval import IntervalField
from django_orm.postgresql.fields.bytea import ByteaField
from django_orm.manager import Manager
class IntModel(models.Model):
lista = ArrayField(dbtype='int')
objects = Manager()
class TextModel(models.Model):
lista = ArrayField(dbtype='text')
objects = Manager()
class DoubleModel(models.Model):
lista = ArrayField(dbtype='double precision')
objects = Manager()
class VarcharModel(models.Model):
lista = ArrayField(dbtype='varchar(40)')
objects = Manager()
class IntervalModel(models.Model):
iv = IntervalField()
objects = Manager()
class ByteaModel(models.Model):
bb = ByteaField()
objects = Manager()
from django_orm.postgresql.geometric.fields import PointField, CircleField
from django_orm.postgresql.geometric.fields import LsegField, BoxField
from django_orm.postgresql.geometric.fields import PathField, PolygonField
class GeomModel(models.Model):
pt = PointField()
pl = PolygonField()
ln = LsegField()
bx = BoxField()
cr = CircleField()
ph = PathField()
objects = Manager()
|
...
from django_orm.postgresql.fields.arrays import ArrayField
from django_orm.postgresql.fields.interval import IntervalField
from django_orm.postgresql.fields.bytea import ByteaField
from django_orm.manager import Manager
class IntModel(models.Model):
lista = ArrayField(dbtype='int')
objects = Manager()
class TextModel(models.Model):
lista = ArrayField(dbtype='text')
objects = Manager()
class DoubleModel(models.Model):
lista = ArrayField(dbtype='double precision')
objects = Manager()
class VarcharModel(models.Model):
lista = ArrayField(dbtype='varchar(40)')
objects = Manager()
class IntervalModel(models.Model):
iv = IntervalField()
objects = Manager()
class ByteaModel(models.Model):
bb = ByteaField()
objects = Manager()
from django_orm.postgresql.geometric.fields import PointField, CircleField
...
cr = CircleField()
ph = PathField()
objects = Manager()
...
|
a6d87b6e4dba63b0a74dc6173e90823bdb9fe070
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "[email protected]",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
|
from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "[email protected]",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
|
Add install_requires packages, at version 0.1.17
|
Add install_requires packages, at version 0.1.17
|
Python
|
mit
|
MBARIMike/biofloat,MBARIMike/biofloat,biofloat/biofloat,biofloat/biofloat
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "[email protected]",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
## Instruction:
Add install_requires packages, at version 0.1.17
## Code After:
from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "[email protected]",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
|
# ... existing code ...
from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
# ... rest of the code ...
|
29f6a260e49a6955dd12d354400d9ee6cfd6ddc7
|
tests/qtcore/qstatemachine_test.py
|
tests/qtcore/qstatemachine_test.py
|
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
class QStateMachineTest(unittest.TestCase):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
app = QCoreApplication([])
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, app.quit)
app.exec_()
if __name__ == '__main__':
unittest.main()
|
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
from helper import UsesQCoreApplication
class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
Add UsesQCoreApplication in state machine test
|
Add UsesQCoreApplication in state machine test
|
Python
|
lgpl-2.1
|
M4rtinK/pyside-bb10,enthought/pyside,M4rtinK/pyside-android,PySide/PySide,IronManMark20/pyside2,PySide/PySide,M4rtinK/pyside-bb10,RobinD42/pyside,BadSingleton/pyside2,PySide/PySide,qtproject/pyside-pyside,enthought/pyside,pankajp/pyside,pankajp/pyside,M4rtinK/pyside-android,PySide/PySide,BadSingleton/pyside2,gbaty/pyside2,qtproject/pyside-pyside,enthought/pyside,enthought/pyside,RobinD42/pyside,pankajp/pyside,pankajp/pyside,enthought/pyside,M4rtinK/pyside-android,M4rtinK/pyside-bb10,enthought/pyside,gbaty/pyside2,qtproject/pyside-pyside,PySide/PySide,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,M4rtinK/pyside-android,qtproject/pyside-pyside,gbaty/pyside2,RobinD42/pyside,BadSingleton/pyside2,RobinD42/pyside,enthought/pyside,RobinD42/pyside,gbaty/pyside2,IronManMark20/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,RobinD42/pyside,IronManMark20/pyside2,BadSingleton/pyside2,pankajp/pyside,M4rtinK/pyside-android,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-android,gbaty/pyside2,RobinD42/pyside,qtproject/pyside-pyside
|
python
|
## Code Before:
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
class QStateMachineTest(unittest.TestCase):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
app = QCoreApplication([])
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, app.quit)
app.exec_()
if __name__ == '__main__':
unittest.main()
## Instruction:
Add UsesQCoreApplication in state machine test
## Code After:
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
from helper import UsesQCoreApplication
class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
s3 = QFinalState()
QObject.connect(self.machine, SIGNAL("started()"), self.cb)
self.anim = QParallelAnimationGroup()
self.machine.addState(s1)
self.machine.addState(s2)
self.machine.addState(s3)
self.machine.setInitialState(s1)
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
import unittest
from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation
from helper import UsesQCoreApplication
class QStateMachineTest(UsesQCoreApplication):
def cb(self, *args):
self.assertEqual(self.machine.defaultAnimations(), [self.anim])
def testBasic(self):
self.machine = QStateMachine()
s1 = QState()
s2 = QState()
# ... modified code ...
self.machine.addDefaultAnimation(self.anim)
self.machine.start()
QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()
# ... rest of the code ...
|
5c1fc4b6ebbd2ee54318c5bc9877868858ea03ee
|
auth0/v2/authentication/base.py
|
auth0/v2/authentication/base.py
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
Add .get() method to AuthenticationBase
|
Add .get() method to AuthenticationBase
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
python
|
## Code Before:
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
## Instruction:
Add .get() method to AuthenticationBase
## Code After:
import json
import requests
from ..exceptions import Auth0Error
class AuthenticationBase(object):
def post(self, url, data={}, headers={}):
response = requests.post(url=url, data=json.dumps(data),
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
if 'error' in text:
raise Auth0Error(status_code=text['error'],
error_code=text['error'],
message=text['error_description'])
return text
|
// ... existing code ...
headers=headers)
return self._process_response(response)
def get(self, url, params={}, headers={}):
return requests.get(url=url, params=params, headers=headers).text
def _process_response(self, response):
text = json.loads(response.text) if response.text else {}
// ... rest of the code ...
|
758f44c05349cc37d7beb7d4077f63f4811ebcb7
|
src/main/java/in/twizmwaz/cardinal/module/modules/arrows/ArrowRunnable.java
|
src/main/java/in/twizmwaz/cardinal/module/modules/arrows/ArrowRunnable.java
|
package in.twizmwaz.cardinal.module.modules.arrows;
import in.twizmwaz.cardinal.util.MiscUtil;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.entity.Arrow;
public class ArrowRunnable implements Runnable {
private final Arrow arrow;
private final float x;
private final float y;
private final float z;
private Integer taskId = null;
protected ArrowRunnable(Arrow arrow, ChatColor chatColor) {
this.arrow = arrow;
Color rgb = MiscUtil.convertChatColorToColor(chatColor);
x = (float) rgb.getRed() / 255;
y = (float) rgb.getGreen() / 255;
z = (float) rgb.getBlue() / 255;
}
public void setTask(int id) {
this.taskId = id;
}
@Override
public void run() {
if (arrow.isOnGround() || arrow.isDead()) {
ArrowModule.arrowOnGround(arrow);
Bukkit.getScheduler().cancelTask(taskId);
} else {
ArrowModule.sendArrowParticle(arrow, x, y, z);
}
}
}
|
package in.twizmwaz.cardinal.module.modules.arrows;
import in.twizmwaz.cardinal.util.MiscUtil;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.entity.Arrow;
public class ArrowRunnable implements Runnable {
private static final float ZERO = 0.00001f;
private final Arrow arrow;
private final float x;
private final float y;
private final float z;
private Integer taskId = null;
protected ArrowRunnable(Arrow arrow, ChatColor chatColor) {
this.arrow = arrow;
Color rgb = MiscUtil.convertChatColorToColor(chatColor);
x = rgbToFloat(rgb.getRed());
y = rgbToFloat(rgb.getGreen());
z = rgbToFloat(rgb.getBlue());
}
private static float rgbToFloat(int i) {
if (i == 0) {
return ZERO;
}
return (float) i / 255;
}
public void setTask(int id) {
this.taskId = id;
}
@Override
public void run() {
if (arrow.isOnGround() || arrow.isDead()) {
ArrowModule.arrowOnGround(arrow);
Bukkit.getScheduler().cancelTask(taskId);
} else {
ArrowModule.sendArrowParticle(arrow, x, y, z);
}
}
}
|
Fix arrow particle color bugs
|
Fix arrow particle color bugs
|
Java
|
mit
|
iPGz/CardinalPGM,twizmwazin/CardinalPGM,Pablete1234/CardinalPGM
|
java
|
## Code Before:
package in.twizmwaz.cardinal.module.modules.arrows;
import in.twizmwaz.cardinal.util.MiscUtil;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.entity.Arrow;
public class ArrowRunnable implements Runnable {
private final Arrow arrow;
private final float x;
private final float y;
private final float z;
private Integer taskId = null;
protected ArrowRunnable(Arrow arrow, ChatColor chatColor) {
this.arrow = arrow;
Color rgb = MiscUtil.convertChatColorToColor(chatColor);
x = (float) rgb.getRed() / 255;
y = (float) rgb.getGreen() / 255;
z = (float) rgb.getBlue() / 255;
}
public void setTask(int id) {
this.taskId = id;
}
@Override
public void run() {
if (arrow.isOnGround() || arrow.isDead()) {
ArrowModule.arrowOnGround(arrow);
Bukkit.getScheduler().cancelTask(taskId);
} else {
ArrowModule.sendArrowParticle(arrow, x, y, z);
}
}
}
## Instruction:
Fix arrow particle color bugs
## Code After:
package in.twizmwaz.cardinal.module.modules.arrows;
import in.twizmwaz.cardinal.util.MiscUtil;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.entity.Arrow;
public class ArrowRunnable implements Runnable {
private static final float ZERO = 0.00001f;
private final Arrow arrow;
private final float x;
private final float y;
private final float z;
private Integer taskId = null;
protected ArrowRunnable(Arrow arrow, ChatColor chatColor) {
this.arrow = arrow;
Color rgb = MiscUtil.convertChatColorToColor(chatColor);
x = rgbToFloat(rgb.getRed());
y = rgbToFloat(rgb.getGreen());
z = rgbToFloat(rgb.getBlue());
}
private static float rgbToFloat(int i) {
if (i == 0) {
return ZERO;
}
return (float) i / 255;
}
public void setTask(int id) {
this.taskId = id;
}
@Override
public void run() {
if (arrow.isOnGround() || arrow.isDead()) {
ArrowModule.arrowOnGround(arrow);
Bukkit.getScheduler().cancelTask(taskId);
} else {
ArrowModule.sendArrowParticle(arrow, x, y, z);
}
}
}
|
...
import org.bukkit.entity.Arrow;
public class ArrowRunnable implements Runnable {
private static final float ZERO = 0.00001f;
private final Arrow arrow;
private final float x;
...
protected ArrowRunnable(Arrow arrow, ChatColor chatColor) {
this.arrow = arrow;
Color rgb = MiscUtil.convertChatColorToColor(chatColor);
x = rgbToFloat(rgb.getRed());
y = rgbToFloat(rgb.getGreen());
z = rgbToFloat(rgb.getBlue());
}
private static float rgbToFloat(int i) {
if (i == 0) {
return ZERO;
}
return (float) i / 255;
}
public void setTask(int id) {
...
|
5b3a3ab884fbc09f72e63123647646b6b8b39ac5
|
kim-api/src/main/java/org/kuali/rice/kim/dto/PrincipalDTO.java
|
kim-api/src/main/java/org/kuali/rice/kim/dto/PrincipalDTO.java
|
/*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.dto;
/**
* This is the Data Transfer Object (DTO) that is used for our service layer.
*
* This class represents a Principal entity in the system.
*
* @author Kuali Rice Team ([email protected])
*/
public class PrincipalDTO extends AbstractEntityBaseDTO implements java.security.Principal {
private static final long serialVersionUID = -7894319178912177679L;
private String name;
/**
* This overridden method ...
*
* @see java.security.Principal#getName()
*/
@Override
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
}
|
/*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.dto;
/**
* This is the Data Transfer Object (DTO) that is used for our service layer.
*
* This class represents a Principal entity in the system.
*
* @author Kuali Rice Team ([email protected])
*/
public class PrincipalDTO extends AbstractEntityBaseDTO implements java.security.Principal {
private static final long serialVersionUID = -7894319178912177679L;
private String name;
/**
*
* @see java.security.Principal#getName()
*/
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
}
|
Synchronize DTOs with BOs. @Override caused problems
|
KULRICE-1857: Synchronize DTOs with BOs. @Override caused problems
|
Java
|
apache-2.0
|
bhutchinson/rice,sonamuthu/rice-1,ewestfal/rice,smith750/rice,sonamuthu/rice-1,gathreya/rice-kc,shahess/rice,ewestfal/rice-svn2git-test,jwillia/kc-rice1,geothomasp/kualico-rice-kc,bhutchinson/rice,ewestfal/rice-svn2git-test,sonamuthu/rice-1,bsmith83/rice-1,jwillia/kc-rice1,shahess/rice,ewestfal/rice,smith750/rice,rojlarge/rice-kc,UniversityOfHawaiiORS/rice,rojlarge/rice-kc,bsmith83/rice-1,geothomasp/kualico-rice-kc,bhutchinson/rice,cniesen/rice,cniesen/rice,gathreya/rice-kc,UniversityOfHawaiiORS/rice,jwillia/kc-rice1,jwillia/kc-rice1,sonamuthu/rice-1,bhutchinson/rice,jwillia/kc-rice1,shahess/rice,cniesen/rice,rojlarge/rice-kc,shahess/rice,kuali/kc-rice,smith750/rice,geothomasp/kualico-rice-kc,bsmith83/rice-1,cniesen/rice,ewestfal/rice,rojlarge/rice-kc,kuali/kc-rice,ewestfal/rice,ewestfal/rice-svn2git-test,kuali/kc-rice,UniversityOfHawaiiORS/rice,bhutchinson/rice,smith750/rice,UniversityOfHawaiiORS/rice,kuali/kc-rice,cniesen/rice,gathreya/rice-kc,shahess/rice,bsmith83/rice-1,ewestfal/rice-svn2git-test,geothomasp/kualico-rice-kc,gathreya/rice-kc,ewestfal/rice,smith750/rice,gathreya/rice-kc,rojlarge/rice-kc,kuali/kc-rice,UniversityOfHawaiiORS/rice,geothomasp/kualico-rice-kc
|
java
|
## Code Before:
/*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.dto;
/**
* This is the Data Transfer Object (DTO) that is used for our service layer.
*
* This class represents a Principal entity in the system.
*
* @author Kuali Rice Team ([email protected])
*/
public class PrincipalDTO extends AbstractEntityBaseDTO implements java.security.Principal {
private static final long serialVersionUID = -7894319178912177679L;
private String name;
/**
* This overridden method ...
*
* @see java.security.Principal#getName()
*/
@Override
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
}
## Instruction:
KULRICE-1857: Synchronize DTOs with BOs. @Override caused problems
## Code After:
/*
* Copyright 2007 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.dto;
/**
* This is the Data Transfer Object (DTO) that is used for our service layer.
*
* This class represents a Principal entity in the system.
*
* @author Kuali Rice Team ([email protected])
*/
public class PrincipalDTO extends AbstractEntityBaseDTO implements java.security.Principal {
private static final long serialVersionUID = -7894319178912177679L;
private String name;
/**
*
* @see java.security.Principal#getName()
*/
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
}
|
# ... existing code ...
private String name;
/**
*
* @see java.security.Principal#getName()
*/
public String getName() {
return name;
}
# ... rest of the code ...
|
f14df4ae507f3161f00ac28648bd53f2bb0bd7c3
|
collect_district_court_case_details.py
|
collect_district_court_case_details.py
|
import datetime
import pymongo
import os
import sys
import time
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': sys.argv[1],
'date_collected': {'$exists': False}
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
time.sleep(2)
print 'Finished'
|
import boto.utils
import datetime
import pymongo
import os
import sys
import time
import uuid
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# get some info about this process
process_id = str(uuid.uuid4())
cwd = os.getcwd()
ec2_id = None
try:
ec2_id = boto.utils.get_instance_metadata(timeout=1, num_retries=1)['instance-id']
except:
pass
# create db record for this process
db.scrapers.insert_one({
'process_id': process_id,
'cwd': cwd,
'ec2_id': ec2_id
})
fips_code = sys.argv[1]
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': fips_code,
'date_collected': {'$exists': False}
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
db.scrapers.update_one({'process_id': process_id}, {
'$set': {
'fips_code': fips_code,
'last_update': datetime.datetime.utcnow()
}
})
time.sleep(2)
db.scrapers.remove({'process_id': process_id})
db['completed_courts'].replace_one({'fips_code': fips_code}, {
'fips_code': fips_code,
'completed_time': datetime.datetime.utcnow()
}, upsert=True)
print 'Finished'
|
Save scraper settings to database
|
Save scraper settings to database
This is the first step in allowing multiple processes to run on
different servers. Coming in the next commit!
|
Python
|
mit
|
bschoenfeld/va-court-scraper,bschoenfeld/va-court-scraper
|
python
|
## Code Before:
import datetime
import pymongo
import os
import sys
import time
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': sys.argv[1],
'date_collected': {'$exists': False}
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
time.sleep(2)
print 'Finished'
## Instruction:
Save scraper settings to database
This is the first step in allowing multiple processes to run on
different servers. Coming in the next commit!
## Code After:
import boto.utils
import datetime
import pymongo
import os
import sys
import time
import uuid
from courtreader import readers
# Connect to database
client = pymongo.MongoClient(os.environ['DISTRICT_DB'])
db = client.va_district_court_cases
# Connect to District Court Reader
reader = readers.DistrictCourtReader()
reader.connect()
# get some info about this process
process_id = str(uuid.uuid4())
cwd = os.getcwd()
ec2_id = None
try:
ec2_id = boto.utils.get_instance_metadata(timeout=1, num_retries=1)['instance-id']
except:
pass
# create db record for this process
db.scrapers.insert_one({
'process_id': process_id,
'cwd': cwd,
'ec2_id': ec2_id
})
fips_code = sys.argv[1]
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': fips_code,
'date_collected': {'$exists': False}
})
if case is None: break
print case['CaseNumber']
case_details = reader.get_case_details_by_number( \
case['FIPSCode'], case['CaseNumber'])
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
db.scrapers.update_one({'process_id': process_id}, {
'$set': {
'fips_code': fips_code,
'last_update': datetime.datetime.utcnow()
}
})
time.sleep(2)
db.scrapers.remove({'process_id': process_id})
db['completed_courts'].replace_one({'fips_code': fips_code}, {
'fips_code': fips_code,
'completed_time': datetime.datetime.utcnow()
}, upsert=True)
print 'Finished'
|
// ... existing code ...
import boto.utils
import datetime
import pymongo
import os
import sys
import time
import uuid
from courtreader import readers
# Connect to database
// ... modified code ...
reader = readers.DistrictCourtReader()
reader.connect()
# get some info about this process
process_id = str(uuid.uuid4())
cwd = os.getcwd()
ec2_id = None
try:
ec2_id = boto.utils.get_instance_metadata(timeout=1, num_retries=1)['instance-id']
except:
pass
# create db record for this process
db.scrapers.insert_one({
'process_id': process_id,
'cwd': cwd,
'ec2_id': ec2_id
})
fips_code = sys.argv[1]
# Fill in cases
while True:
case = db.cases.find_one({
'FIPSCode': fips_code,
'date_collected': {'$exists': False}
})
if case is None: break
...
case_details['date_collected'] = datetime.datetime.utcnow()
updated_case = dict(case.items() + case_details.items())
db.cases.replace_one({'_id': case['_id']}, updated_case)
db.scrapers.update_one({'process_id': process_id}, {
'$set': {
'fips_code': fips_code,
'last_update': datetime.datetime.utcnow()
}
})
time.sleep(2)
db.scrapers.remove({'process_id': process_id})
db['completed_courts'].replace_one({'fips_code': fips_code}, {
'fips_code': fips_code,
'completed_time': datetime.datetime.utcnow()
}, upsert=True)
print 'Finished'
// ... rest of the code ...
|
9af50ecde67e593533898040e63e6a456fc16da5
|
tests/test_style.py
|
tests/test_style.py
|
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
|
import logging
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
logger = logging.getLogger('flake8')
logger.setLevel(logging.ERROR)
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
|
Decrease noise from code-style test
|
Decrease noise from code-style test
|
Python
|
mit
|
ministryofjustice/django-zendesk-tickets,ministryofjustice/django-zendesk-tickets
|
python
|
## Code Before:
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
## Instruction:
Decrease noise from code-style test
## Code After:
import logging
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
logger = logging.getLogger('flake8')
logger.setLevel(logging.ERROR)
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
|
# ... existing code ...
import logging
import pkg_resources
import unittest
# ... modified code ...
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
logger = logging.getLogger('flake8')
logger.setLevel(logging.ERROR)
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
# ... rest of the code ...
|
858c61a5d23685b62e590d28c896002291817bb1
|
pygotham/admin/schedule.py
|
pygotham/admin/schedule.py
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
Change admin columns for slots
|
Change admin columns for slots
|
Python
|
bsd-3-clause
|
pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,djds23/pygotham-1,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,PyGotham/pygotham,pathunstrom/pygotham,pathunstrom/pygotham
|
python
|
## Code Before:
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
## Instruction:
Change admin columns for slots
## Code After:
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
// ... existing code ...
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
// ... rest of the code ...
|
df03a2d9543b392fb9ea9c027b93f4ed736e6788
|
pyked/_version.py
|
pyked/_version.py
|
__version_info__ = (0, 1, 1)
__version__ = '.'.join(map(str, __version_info__))
|
__version_info__ = (0, 1, 1, 'a1')
__version__ = '.'.join(map(str, __version_info__[:3]))
if len(__version_info__) == 4:
__version__ += __version_info__[-1]
|
Allow alpha versions in the versioning string
|
Allow alpha versions in the versioning string
|
Python
|
bsd-3-clause
|
bryanwweber/PyKED,pr-omethe-us/PyKED
|
python
|
## Code Before:
__version_info__ = (0, 1, 1)
__version__ = '.'.join(map(str, __version_info__))
## Instruction:
Allow alpha versions in the versioning string
## Code After:
__version_info__ = (0, 1, 1, 'a1')
__version__ = '.'.join(map(str, __version_info__[:3]))
if len(__version_info__) == 4:
__version__ += __version_info__[-1]
|
...
__version_info__ = (0, 1, 1, 'a1')
__version__ = '.'.join(map(str, __version_info__[:3]))
if len(__version_info__) == 4:
__version__ += __version_info__[-1]
...
|
abe853074edfa1d9852b872bffc59a9df5f2297c
|
src/main/java/com/jenjinstudios/io/concurrency/ExecutionTask.java
|
src/main/java/com/jenjinstudios/io/concurrency/ExecutionTask.java
|
package com.jenjinstudios.io.concurrency;
import com.jenjinstudios.io.ExecutionContext;
import com.jenjinstudios.io.Message;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
/**
* Executes ExecutableMessage objects which have been read.
*
* @author Caleb Brinkman
*/
public class ExecutionTask implements Runnable
{
private final MessageQueue messageQueue;
private final ExecutionContext executionContext;
private final Collection<Consumer<ExecutionContext>> contextualTasks;
/**
* Construct a new ExecuteTask that will execute messages from the given MessageQueue.
*
* @param messageQueue The MessageQueue.
* @param executionContext The context in which messages should execute.
* @param contextualTasks Tasks which should be invoked in synchronous fashion with the execution context.
*/
public ExecutionTask(
MessageQueue messageQueue,
ExecutionContext executionContext,
Collection<Consumer<ExecutionContext>> contextualTasks
)
{
this.messageQueue = messageQueue;
this.executionContext = executionContext;
this.contextualTasks = contextualTasks;
}
@Override
public void run() {
final List<Message> incoming = messageQueue.getIncomingAndClear();
incoming.forEach(message -> {
Message response = message.execute(executionContext);
if (response != null) {
messageQueue.queueOutgoing(response);
}
});
contextualTasks.forEach(consumer -> consumer.accept(executionContext));
}
}
|
package com.jenjinstudios.io.concurrency;
import com.jenjinstudios.io.ExecutionContext;
import com.jenjinstudios.io.Message;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
/**
* Executes ExecutableMessage objects which have been read.
*
* @author Caleb Brinkman
*/
public class ExecutionTask implements Runnable
{
private final MessageQueue messageQueue;
private final ExecutionContext executionContext;
private final Collection<Consumer<ExecutionContext>> contextualTasks;
/**
* Construct a new ExecuteTask that will execute messages from the given MessageQueue.
*
* @param messageQueue The MessageQueue.
* @param executionContext The context in which messages should execute.
* @param contextualTasks Tasks which should be invoked in synchronous fashion with the execution context.
*/
public ExecutionTask(
MessageQueue messageQueue,
ExecutionContext executionContext,
Collection<Consumer<ExecutionContext>> contextualTasks
)
{
this.messageQueue = messageQueue;
this.executionContext = executionContext;
this.contextualTasks = contextualTasks;
}
@Override
public void run() {
final List<Message> incoming = messageQueue.getIncomingAndClear();
incoming.forEach(message -> {
Message response = message.execute(executionContext);
if (response != null) {
messageQueue.queueOutgoing(response);
}
contextualTasks.forEach(consumer -> consumer.accept(executionContext));
});
}
}
|
Move contextual task execution to make more sense
|
Move contextual task execution to make more sense
|
Java
|
mit
|
floralvikings/jenjin-io
|
java
|
## Code Before:
package com.jenjinstudios.io.concurrency;
import com.jenjinstudios.io.ExecutionContext;
import com.jenjinstudios.io.Message;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
/**
* Executes ExecutableMessage objects which have been read.
*
* @author Caleb Brinkman
*/
public class ExecutionTask implements Runnable
{
private final MessageQueue messageQueue;
private final ExecutionContext executionContext;
private final Collection<Consumer<ExecutionContext>> contextualTasks;
/**
* Construct a new ExecuteTask that will execute messages from the given MessageQueue.
*
* @param messageQueue The MessageQueue.
* @param executionContext The context in which messages should execute.
* @param contextualTasks Tasks which should be invoked in synchronous fashion with the execution context.
*/
public ExecutionTask(
MessageQueue messageQueue,
ExecutionContext executionContext,
Collection<Consumer<ExecutionContext>> contextualTasks
)
{
this.messageQueue = messageQueue;
this.executionContext = executionContext;
this.contextualTasks = contextualTasks;
}
@Override
public void run() {
final List<Message> incoming = messageQueue.getIncomingAndClear();
incoming.forEach(message -> {
Message response = message.execute(executionContext);
if (response != null) {
messageQueue.queueOutgoing(response);
}
});
contextualTasks.forEach(consumer -> consumer.accept(executionContext));
}
}
## Instruction:
Move contextual task execution to make more sense
## Code After:
package com.jenjinstudios.io.concurrency;
import com.jenjinstudios.io.ExecutionContext;
import com.jenjinstudios.io.Message;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
/**
* Executes ExecutableMessage objects which have been read.
*
* @author Caleb Brinkman
*/
public class ExecutionTask implements Runnable
{
private final MessageQueue messageQueue;
private final ExecutionContext executionContext;
private final Collection<Consumer<ExecutionContext>> contextualTasks;
/**
* Construct a new ExecuteTask that will execute messages from the given MessageQueue.
*
* @param messageQueue The MessageQueue.
* @param executionContext The context in which messages should execute.
* @param contextualTasks Tasks which should be invoked in synchronous fashion with the execution context.
*/
public ExecutionTask(
MessageQueue messageQueue,
ExecutionContext executionContext,
Collection<Consumer<ExecutionContext>> contextualTasks
)
{
this.messageQueue = messageQueue;
this.executionContext = executionContext;
this.contextualTasks = contextualTasks;
}
@Override
public void run() {
final List<Message> incoming = messageQueue.getIncomingAndClear();
incoming.forEach(message -> {
Message response = message.execute(executionContext);
if (response != null) {
messageQueue.queueOutgoing(response);
}
contextualTasks.forEach(consumer -> consumer.accept(executionContext));
});
}
}
|
...
if (response != null) {
messageQueue.queueOutgoing(response);
}
contextualTasks.forEach(consumer -> consumer.accept(executionContext));
});
}
}
...
|
ecfa18b1c6d8dfd565ab625b3bb600d2d792310f
|
src/bitmessageqt/widgets.py
|
src/bitmessageqt/widgets.py
|
from PyQt4 import uic
import os.path
import sys
def resource_path(path):
try:
return os.path.join(sys._MEIPASS, path)
except:
return os.path.join(os.path.dirname(__file__), path)
def load(path, widget):
uic.loadUi(resource_path(path), widget)
|
from PyQt4 import uic
import os.path
import sys
from shared import codePath
def resource_path(resFile):
baseDir = codePath()
for subDir in ["ui", "bitmessageqt"]:
if os.path.isdir(os.path.join(baseDir, subDir)) and os.path.isfile(os.path.join(baseDir, subDir, resFile)):
return os.path.join(baseDir, subDir, resFile)
def load(resFile, widget):
uic.loadUi(resource_path(resFile), widget)
|
Change UI loading for frozen
|
Change UI loading for frozen
|
Python
|
mit
|
debguy0x/PyBitmessage,bmng-dev/PyBitmessage,hb9kns/PyBitmessage,hb9kns/PyBitmessage,timothyparez/PyBitmessage,torifier/PyBitmessage,hb9kns/PyBitmessage,debguy0x/PyBitmessage,debguy0x/PyBitmessage,torifier/PyBitmessage,timothyparez/PyBitmessage,torifier/PyBitmessage,timothyparez/PyBitmessage,bmng-dev/PyBitmessage,torifier/PyBitmessage,hb9kns/PyBitmessage,debguy0x/PyBitmessage,timothyparez/PyBitmessage,bmng-dev/PyBitmessage
|
python
|
## Code Before:
from PyQt4 import uic
import os.path
import sys
def resource_path(path):
try:
return os.path.join(sys._MEIPASS, path)
except:
return os.path.join(os.path.dirname(__file__), path)
def load(path, widget):
uic.loadUi(resource_path(path), widget)
## Instruction:
Change UI loading for frozen
## Code After:
from PyQt4 import uic
import os.path
import sys
from shared import codePath
def resource_path(resFile):
baseDir = codePath()
for subDir in ["ui", "bitmessageqt"]:
if os.path.isdir(os.path.join(baseDir, subDir)) and os.path.isfile(os.path.join(baseDir, subDir, resFile)):
return os.path.join(baseDir, subDir, resFile)
def load(resFile, widget):
uic.loadUi(resource_path(resFile), widget)
|
// ... existing code ...
from PyQt4 import uic
import os.path
import sys
from shared import codePath
def resource_path(resFile):
baseDir = codePath()
for subDir in ["ui", "bitmessageqt"]:
if os.path.isdir(os.path.join(baseDir, subDir)) and os.path.isfile(os.path.join(baseDir, subDir, resFile)):
return os.path.join(baseDir, subDir, resFile)
def load(resFile, widget):
uic.loadUi(resource_path(resFile), widget)
// ... rest of the code ...
|
234e50c105b7d7d1e77e1c392200668891130840
|
formish/__init__.py
|
formish/__init__.py
|
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError
from formish.widgets import *
from formish.util import form_in_request
|
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError, NoActionError
from formish.widgets import *
from formish.util import form_in_request
|
Add missing exception to package-level exports.
|
Add missing exception to package-level exports.
|
Python
|
bsd-3-clause
|
ish/formish,ish/formish,ish/formish
|
python
|
## Code Before:
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError
from formish.widgets import *
from formish.util import form_in_request
## Instruction:
Add missing exception to package-level exports.
## Code After:
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError, NoActionError
from formish.widgets import *
from formish.util import form_in_request
|
// ... existing code ...
from formish.forms import Form
from formish.validation import FieldError, FormError, FormishError, NoActionError
from formish.widgets import *
from formish.util import form_in_request
// ... rest of the code ...
|
afe9df97488c789740212a40fdca119e4845308c
|
src/condor_includes/condor_fix_unistd.h
|
src/condor_includes/condor_fix_unistd.h
|
/*
For some reason the g++ include files on Ultrix 4.3 fail to provide
these prototypes, even though the Ultrix 4.2 versions did...
Once again, OSF1 also chokes, unless _AES_SOURCE(?) is defined JCP
*/
#if defined(ULTRIX43) || defined(SUNOS41) || defined(OSF1)
#if defined(__cplusplus)
extern "C" {
#endif
#if defined(SUNOS41) || defined(ULTRIX43)
typedef unsigned long ssize_t;
#endif
#if defined(__STDC__) || defined(__cplusplus)
int symlink( const char *, const char * );
void *sbrk( ssize_t );
int gethostname( char *, int );
#else
int symlink();
char *sbrk();
int gethostname();
#endif
#if defined(__cplusplus)
}
#endif
#endif /* ULTRIX43 */
#endif
|
/*
For some reason the g++ include files on Ultrix 4.3 fail to provide
these prototypes, even though the Ultrix 4.2 versions did...
Once again, OSF1 also chokes, unless _AES_SOURCE(?) is defined JCP
*/
#if defined(ULTRIX43) || defined(SUNOS41) || defined(OSF1)
#if defined(__cplusplus)
extern "C" {
#endif
#if defined(SUNOS41) || defined(ULTRIX43)
typedef unsigned long ssize_t;
#endif
#if defined(__STDC__) || defined(__cplusplus)
int symlink( const char *, const char * );
void *sbrk( ssize_t );
int gethostname( char *, int );
# if defined(SUNOS41)
ssize_t read( int, void *, size_t );
ssize_t write( int, const void *, size_t );
# endif
#else
int symlink();
char *sbrk();
int gethostname();
#endif
#if defined(__cplusplus)
}
#endif
#endif /* ULTRIX43 */
#endif
|
Fix up prototypes for read() and write() on Suns.
|
Fix up prototypes for read() and write() on Suns.
|
C
|
apache-2.0
|
djw8605/condor,clalancette/condor-dcloud,zhangzhehust/htcondor,djw8605/htcondor,djw8605/condor,bbockelm/condor-network-accounting,djw8605/condor,mambelli/osg-bosco-marco,djw8605/htcondor,djw8605/htcondor,neurodebian/htcondor,htcondor/htcondor,mambelli/osg-bosco-marco,mambelli/osg-bosco-marco,djw8605/htcondor,neurodebian/htcondor,mambelli/osg-bosco-marco,bbockelm/condor-network-accounting,zhangzhehust/htcondor,djw8605/condor,djw8605/condor,djw8605/htcondor,clalancette/condor-dcloud,djw8605/htcondor,htcondor/htcondor,bbockelm/condor-network-accounting,bbockelm/condor-network-accounting,clalancette/condor-dcloud,djw8605/condor,bbockelm/condor-network-accounting,neurodebian/htcondor,mambelli/osg-bosco-marco,htcondor/htcondor,neurodebian/htcondor,djw8605/condor,neurodebian/htcondor,zhangzhehust/htcondor,djw8605/htcondor,htcondor/htcondor,djw8605/htcondor,neurodebian/htcondor,htcondor/htcondor,zhangzhehust/htcondor,zhangzhehust/htcondor,zhangzhehust/htcondor,clalancette/condor-dcloud,bbockelm/condor-network-accounting,djw8605/htcondor,bbockelm/condor-network-accounting,mambelli/osg-bosco-marco,neurodebian/htcondor,neurodebian/htcondor,clalancette/condor-dcloud,htcondor/htcondor,clalancette/condor-dcloud,clalancette/condor-dcloud,djw8605/condor,mambelli/osg-bosco-marco,htcondor/htcondor,zhangzhehust/htcondor,mambelli/osg-bosco-marco,htcondor/htcondor,zhangzhehust/htcondor,neurodebian/htcondor,zhangzhehust/htcondor,bbockelm/condor-network-accounting
|
c
|
## Code Before:
/*
For some reason the g++ include files on Ultrix 4.3 fail to provide
these prototypes, even though the Ultrix 4.2 versions did...
Once again, OSF1 also chokes, unless _AES_SOURCE(?) is defined JCP
*/
#if defined(ULTRIX43) || defined(SUNOS41) || defined(OSF1)
#if defined(__cplusplus)
extern "C" {
#endif
#if defined(SUNOS41) || defined(ULTRIX43)
typedef unsigned long ssize_t;
#endif
#if defined(__STDC__) || defined(__cplusplus)
int symlink( const char *, const char * );
void *sbrk( ssize_t );
int gethostname( char *, int );
#else
int symlink();
char *sbrk();
int gethostname();
#endif
#if defined(__cplusplus)
}
#endif
#endif /* ULTRIX43 */
#endif
## Instruction:
Fix up prototypes for read() and write() on Suns.
## Code After:
/*
For some reason the g++ include files on Ultrix 4.3 fail to provide
these prototypes, even though the Ultrix 4.2 versions did...
Once again, OSF1 also chokes, unless _AES_SOURCE(?) is defined JCP
*/
#if defined(ULTRIX43) || defined(SUNOS41) || defined(OSF1)
#if defined(__cplusplus)
extern "C" {
#endif
#if defined(SUNOS41) || defined(ULTRIX43)
typedef unsigned long ssize_t;
#endif
#if defined(__STDC__) || defined(__cplusplus)
int symlink( const char *, const char * );
void *sbrk( ssize_t );
int gethostname( char *, int );
# if defined(SUNOS41)
ssize_t read( int, void *, size_t );
ssize_t write( int, const void *, size_t );
# endif
#else
int symlink();
char *sbrk();
int gethostname();
#endif
#if defined(__cplusplus)
}
#endif
#endif /* ULTRIX43 */
#endif
|
# ... existing code ...
int symlink( const char *, const char * );
void *sbrk( ssize_t );
int gethostname( char *, int );
# if defined(SUNOS41)
ssize_t read( int, void *, size_t );
ssize_t write( int, const void *, size_t );
# endif
#else
int symlink();
char *sbrk();
# ... rest of the code ...
|
34d2be5fddf51413062f0995bfcb3b9401ffcef1
|
utils.h
|
utils.h
|
namespace me {
template<class T>
class Optional {
T *m_object;
public:
Optional() : m_object(0) {}
Optional(const T& other) : m_object(new T(other)) {}
Optional& operator=(const T& other) {
if (m_object != 0) {
delete m_object;
}
m_object = new T(other);
return *this;
}
operator bool() const {
return m_object != 0;
}
T& operator *() const {
return *m_object;
}
T* operator ->() const {
return m_object;
}
T* pointer() const {
return m_object;
}
};
// splitting.
std::vector<std::string> &split(const std::string &s, char delim, std::vector<std::string> &elems);
std::vector<std::string> split(const std::string &s, char delim);
}
|
namespace me {
// basic optional implementation.
template<class T>
class Optional {
T *m_object;
public:
Optional() : m_object(0) { }
// copy constructor
Optional(const Optional<T>& other) : m_object(new T(*other)) { }
// move constructor
Optional(Optional<T>&& other) : m_object(other.m_object) {
other.m_object = 0;
}
Optional(const T& other) : m_object(new T(other)) { }
// destructor
~Optional() {
delete m_object; // delete 0 is a no-op, so we are fine here.
}
// copy assignment operator
Optional& operator=(const Optional<T>& other) {
delete m_object; // delete 0 is a no-op
m_object = new T(*other);
return *this;
}
Optional& operator=(const T& other) {
delete m_object; // delete 0 is a no-op
m_object = new T(other);
return *this;
}
// move assignment operator
Optional& operator=(Optional<T>&& other) {
delete m_object; // delete 0 is a no-op
m_object = other.m_object;
other.m_object = 0;
return *this;
}
operator bool() const {
return m_object != 0;
}
T& operator *() const {
return *m_object;
}
T* operator ->() const {
return m_object;
}
T* pointer() const {
return m_object;
}
};
// splitting.
std::vector<std::string> &split(const std::string &s, char delim, std::vector<std::string> &elems);
std::vector<std::string> split(const std::string &s, char delim);
}
|
Use move semantics in Optional implementation.
|
Use move semantics in Optional implementation.
|
C
|
mit
|
ckarmann/TrackCommit,ckarmann/TrackCommit
|
c
|
## Code Before:
namespace me {
template<class T>
class Optional {
T *m_object;
public:
Optional() : m_object(0) {}
Optional(const T& other) : m_object(new T(other)) {}
Optional& operator=(const T& other) {
if (m_object != 0) {
delete m_object;
}
m_object = new T(other);
return *this;
}
operator bool() const {
return m_object != 0;
}
T& operator *() const {
return *m_object;
}
T* operator ->() const {
return m_object;
}
T* pointer() const {
return m_object;
}
};
// splitting.
std::vector<std::string> &split(const std::string &s, char delim, std::vector<std::string> &elems);
std::vector<std::string> split(const std::string &s, char delim);
}
## Instruction:
Use move semantics in Optional implementation.
## Code After:
namespace me {
// Basic optional<T> implementation: owns a heap-allocated T, or nothing.
template<class T>
class Optional {
    T *m_object;  // owned; 0 when the optional is empty
public:
    Optional() : m_object(0) { }

    // copy constructor -- must tolerate an empty source (the original
    // unconditionally dereferenced other.m_object, which is UB when
    // `other` holds no value)
    Optional(const Optional<T>& other)
        : m_object(other.m_object ? new T(*other.m_object) : 0) { }

    // move constructor: steal the pointer and leave `other` empty
    Optional(Optional<T>&& other) : m_object(other.m_object) {
        other.m_object = 0;
    }

    Optional(const T& other) : m_object(new T(other)) { }

    // destructor; delete 0 is a no-op, so an empty optional is fine
    ~Optional() {
        delete m_object;
    }

    // copy assignment: build the copy *before* releasing the old value so
    // that self-assignment and an empty source are both safe (the original
    // deleted first, then copied from a possibly-deleted / null object)
    Optional& operator=(const Optional<T>& other) {
        T *copy = other.m_object ? new T(*other.m_object) : 0;
        delete m_object;
        m_object = copy;
        return *this;
    }

    Optional& operator=(const T& other) {
        // copy-then-delete: safe even if `other` aliases *m_object
        T *copy = new T(other);
        delete m_object;
        m_object = copy;
        return *this;
    }

    // move assignment: guard against self-move before deleting
    Optional& operator=(Optional<T>&& other) {
        if (this != &other) {
            delete m_object;
            m_object = other.m_object;
            other.m_object = 0;
        }
        return *this;
    }

    // true when a value is held
    operator bool() const {
        return m_object != 0;
    }
    // precondition for the accessors below: a value must be held
    T& operator *() const {
        return *m_object;
    }
    T* operator ->() const {
        return m_object;
    }
    T* pointer() const {
        return m_object;
    }
};
// splitting.
std::vector<std::string> &split(const std::string &s, char delim, std::vector<std::string> &elems);
std::vector<std::string> split(const std::string &s, char delim);
}
|
# ... existing code ...
namespace me {
// basic optional implementation.
template<class T>
class Optional {
T *m_object;
public:
Optional() : m_object(0) { }
// copy constructor
Optional(const Optional<T>& other) : m_object(new T(*other)) { }
// move constructor
Optional(Optional<T>&& other) : m_object(other.m_object) {
other.m_object = 0;
}
Optional(const T& other) : m_object(new T(other)) { }
// destructor
~Optional() {
delete m_object; // delete 0 is a no-op, so we are fine here.
}
// copy assignment operator
Optional& operator=(const Optional<T>& other) {
delete m_object; // delete 0 is a no-op
m_object = new T(*other);
return *this;
}
Optional& operator=(const T& other) {
delete m_object; // delete 0 is a no-op
m_object = new T(other);
return *this;
}
// move assignment operator
Optional& operator=(Optional<T>&& other) {
delete m_object; // delete 0 is a no-op
m_object = other.m_object;
other.m_object = 0;
return *this;
}
# ... rest of the code ...
|
457a40d3487d59147bcea71dd06f49317167c8d1
|
hash_table.py
|
hash_table.py
|
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
def __init__(self, key, value):
self.key = key
self.value = value
class Hash(object):
def __init__(self, size=1024):
self.table = []
for i in range(size):
self.table.append(list())
def hash(self, key):
hash_value = 0
for i in key:
hash_value += ord(key)
return hash_value % len(self.table)
def get(self, key):
hashed_key = self.hash(key)
for k in self.table[hashed_key]:
if k[0] == key:
return k[1]
else:
raise KeyError('Value not found')
def set(self):
pass
|
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
def __init__(self, key, value):
self.key = key
self.value = value
class Hash(object):
def __init__(self, size=1024):
self.table = []
for i in range(size):
self.table.append(list())
def hash(self, key):
hash_value = 0
for i in key:
hash_value += ord(key)
return hash_value % len(self.table)
def get(self, key):
hashed_key = self.hash(key)
for k in self.table[hashed_key]:
if k[0] == key:
return k[1]
else:
raise KeyError('Value not found')
def set(self, key, val):
hashed_key = self.hash(key)
self.table[hashed_key].append((key, val))
|
Build out set function of hash table class; still need to deal with outcome of setting multiple values to same key
|
Build out set function of hash table class; still need to deal with outcome of setting multiple values to same key
|
Python
|
mit
|
jwarren116/data-structures-deux
|
python
|
## Code Before:
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
def __init__(self, key, value):
self.key = key
self.value = value
class Hash(object):
def __init__(self, size=1024):
self.table = []
for i in range(size):
self.table.append(list())
def hash(self, key):
hash_value = 0
for i in key:
hash_value += ord(key)
return hash_value % len(self.table)
def get(self, key):
hashed_key = self.hash(key)
for k in self.table[hashed_key]:
if k[0] == key:
return k[1]
else:
raise KeyError('Value not found')
def set(self):
pass
## Instruction:
Build out set function of hash table class; still need to deal with outcome of setting multiple values to same key
## Code After:
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
    """Simple container pairing a key with its stored value."""

    def __init__(self, key, value):
        # Both attributes are public and read directly by callers.
        self.value = value
        self.key = key
class Hash(object):
    """A fixed-size hash table using separate chaining for collisions.

    Each slot of ``self.table`` is a list (bucket) of ``(key, value)``
    tuples. Keys are expected to be strings (the hash sums character codes).
    """

    def __init__(self, size=1024):
        # Build `size` independent buckets; one list per slot.
        self.table = []
        for i in range(size):
            self.table.append(list())

    def hash(self, key):
        """Hash a string key to a bucket index by summing character codes.

        Bug fix: the original added ``ord(key)`` inside ``for i in key`` --
        ``ord()`` on the whole string raises TypeError for any key longer
        than one character. Sum the individual characters instead.
        """
        hash_value = 0
        for i in key:
            hash_value += ord(i)
        return hash_value % len(self.table)

    def get(self, key):
        """Return the value stored under ``key``.

        Raises KeyError when the key is absent. (The original raised on the
        first non-matching entry in a bucket and silently returned None for
        an empty bucket; now the whole bucket is scanned before raising.)
        """
        hashed_key = self.hash(key)
        for k in self.table[hashed_key]:
            if k[0] == key:
                return k[1]
        raise KeyError('Value not found')

    def set(self, key, val):
        """Store ``val`` under ``key``, replacing any existing entry.

        Replacing in place (rather than appending a duplicate tuple) keeps
        get() consistent after repeated sets of the same key.
        """
        hashed_key = self.hash(key)
        bucket = self.table[hashed_key]
        for i, item in enumerate(bucket):
            if item[0] == key:
                bucket[i] = (key, val)
                return
        bucket.append((key, val))
|
# ... existing code ...
else:
raise KeyError('Value not found')
def set(self, key, val):
hashed_key = self.hash(key)
self.table[hashed_key].append((key, val))
# ... rest of the code ...
|
9f7a4b33285e0ce6215c26a9e195563d081841a4
|
src/java/com/collabnet/svnedge/schema/MS009EnableCloudService.java
|
src/java/com/collabnet/svnedge/schema/MS009EnableCloudService.java
|
/*
* CollabNet Subversion Edge
* Copyright (C) 2011, CollabNet Inc. All rights reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.collabnet.svnedge.schema;
import org.apache.log4j.Logger;
import java.sql.SQLException;
public class MS009EnableCloudService implements MigrationScript {
private Logger log = Logger.getLogger(getClass());
public boolean migrate(SqlUtil db) throws SQLException {
db.executeUpdateSql("update CLOUD_SERVICES_CONFIGURATION " +
"set ENABLED = true");
return false;
}
public int[] getVersion() {
return new int[] {2,4,1};
}
}
|
/*
* CollabNet Subversion Edge
* Copyright (C) 2011, CollabNet Inc. All rights reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.collabnet.svnedge.schema;
import org.apache.log4j.Logger;
import java.sql.SQLException;
public class MS009EnableCloudService implements MigrationScript {
private Logger log = Logger.getLogger(getClass());
public boolean migrate(SqlUtil db) throws SQLException {
// insert row, if it does not exist
db.executeUpdateSql("insert into CLOUD_SERVICES_CONFIGURATION " +
"(select 1 as ID, 0 as VERSION, '' as DOMAIN, '' as USERNAME," +
" '' as PASSWORD, true as ENABLED from " +
"(select 1 as ID, count(*) from CLOUD_SERVICES_CONFIGURATION)" +
" where ID not in " +
"(select ID from CLOUD_SERVICES_CONFIGURATION))");
// for installations where a row already existed
db.executeUpdateSql("update CLOUD_SERVICES_CONFIGURATION " +
"set ENABLED = true");
return false;
}
public int[] getVersion() {
return new int[] {3,0,0};
}
}
|
Enable cloud services by default
|
[artf7057]: Enable cloud services by default
* src/java/com/collabnet/svnedge/schema/MS009EnableCloudService.java
Need to add the row, if it does not exist.
git-svn-id: ec3c21ed833430e32134c61539c75518a6355d4e@2982 03e8f217-bfc6-4b7c-bcb7-0738c91e2c5f
|
Java
|
agpl-3.0
|
marcellodesales/svnedge-console,marcellodesales/svnedge-console,marcellodesales/svnedge-console,marcellodesales/svnedge-console,marcellodesales/svnedge-console
|
java
|
## Code Before:
/*
* CollabNet Subversion Edge
* Copyright (C) 2011, CollabNet Inc. All rights reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.collabnet.svnedge.schema;
import org.apache.log4j.Logger;
import java.sql.SQLException;
public class MS009EnableCloudService implements MigrationScript {
private Logger log = Logger.getLogger(getClass());
public boolean migrate(SqlUtil db) throws SQLException {
db.executeUpdateSql("update CLOUD_SERVICES_CONFIGURATION " +
"set ENABLED = true");
return false;
}
public int[] getVersion() {
return new int[] {2,4,1};
}
}
## Instruction:
[artf7057]: Enable cloud services by default
* src/java/com/collabnet/svnedge/schema/MS009EnableCloudService.java
Need to add the row, if it does not exist.
git-svn-id: ec3c21ed833430e32134c61539c75518a6355d4e@2982 03e8f217-bfc6-4b7c-bcb7-0738c91e2c5f
## Code After:
/*
* CollabNet Subversion Edge
* Copyright (C) 2011, CollabNet Inc. All rights reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.collabnet.svnedge.schema;
import org.apache.log4j.Logger;
import java.sql.SQLException;
/**
 * Schema migration that enables the cloud-services integration by default:
 * ensures a configuration row exists in CLOUD_SERVICES_CONFIGURATION and
 * flips its ENABLED flag to true.
 */
public class MS009EnableCloudService implements MigrationScript {
    private Logger log = Logger.getLogger(getClass());

    /**
     * Executes the migration statements.
     *
     * @param db SQL helper used to run the update statements
     * @return false -- NOTE(review): the meaning of the return value is not
     *         visible here; confirm against the MigrationScript contract
     * @throws SQLException if either statement fails
     */
    public boolean migrate(SqlUtil db) throws SQLException {
        // insert row, if it does not exist
        // The nested selects synthesize a candidate row (ID=1, empty
        // credentials, ENABLED=true) and insert it only when no row with
        // that ID is already present.
        db.executeUpdateSql("insert into CLOUD_SERVICES_CONFIGURATION " +
            "(select 1 as ID, 0 as VERSION, '' as DOMAIN, '' as USERNAME," +
            " '' as PASSWORD, true as ENABLED from " +
            "(select 1 as ID, count(*) from CLOUD_SERVICES_CONFIGURATION)" +
            " where ID not in " +
            "(select ID from CLOUD_SERVICES_CONFIGURATION))");
        // for installations where a row already existed
        db.executeUpdateSql("update CLOUD_SERVICES_CONFIGURATION " +
            "set ENABLED = true");
        return false;
    }

    /** @return the schema version this migration targets (3.0.0). */
    public int[] getVersion() {
        return new int[] {3,0,0};
    }
}
|
# ... existing code ...
private Logger log = Logger.getLogger(getClass());
public boolean migrate(SqlUtil db) throws SQLException {
// insert row, if it does not exist
db.executeUpdateSql("insert into CLOUD_SERVICES_CONFIGURATION " +
"(select 1 as ID, 0 as VERSION, '' as DOMAIN, '' as USERNAME," +
" '' as PASSWORD, true as ENABLED from " +
"(select 1 as ID, count(*) from CLOUD_SERVICES_CONFIGURATION)" +
" where ID not in " +
"(select ID from CLOUD_SERVICES_CONFIGURATION))");
// for installations where a row already existed
db.executeUpdateSql("update CLOUD_SERVICES_CONFIGURATION " +
"set ENABLED = true");
return false;
# ... modified code ...
}
public int[] getVersion() {
return new int[] {3,0,0};
}
}
# ... rest of the code ...
|
191ba72a2d8b2e47363fcdbd200549ff3eef18fb
|
plex/objects/library/section.py
|
plex/objects/library/section.py
|
from plex.core.helpers import to_iterable
from plex.objects.container import Container
from plex.objects.core.base import Property
from plex.objects.directory import Directory
class Section(Directory):
uuid = Property
filters = Property(type=bool)
refreshing = Property(type=bool)
agent = Property
scanner = Property
language = Property
created_at = Property('createdAt', int)
def __transform__(self):
self.path = '/library/sections/%s' % self.key
def all(self):
response = self.http.get('all')
return self.parse(response, {
'MediaContainer': ('MediaContainer', {
'Directory': {
'artist': 'Artist',
'show': 'Show'
},
'Video': {
'movie': 'Movie'
}
})
})
class SectionContainer(Container):
filter_passes = lambda _, allowed, value: allowed is None or value in allowed
def filter(self, types=None, keys=None, titles=None):
types = to_iterable(types)
keys = to_iterable(keys)
titles = [x.lower() for x in to_iterable(titles)]
for section in self:
if not self.filter_passes(types, section.type):
continue
if not self.filter_passes(keys, section.key):
continue
if not self.filter_passes(titles, section.title.lower()):
continue
yield section
|
from plex.core.helpers import to_iterable
from plex.objects.container import Container
from plex.objects.core.base import Property
from plex.objects.directory import Directory
class Section(Directory):
uuid = Property
filters = Property(type=bool)
refreshing = Property(type=bool)
agent = Property
scanner = Property
language = Property
created_at = Property('createdAt', int)
def __transform__(self):
self.path = '/library/sections/%s' % self.key
def all(self):
response = self.http.get('all')
return self.parse(response, {
'MediaContainer': ('MediaContainer', {
'Directory': {
'artist': 'Artist',
'show': 'Show'
},
'Video': {
'movie': 'Movie'
}
})
})
class SectionContainer(Container):
filter_passes = lambda _, allowed, value: allowed is None or value in allowed
def filter(self, types=None, keys=None, titles=None):
types = to_iterable(types)
keys = to_iterable(keys)
titles = to_iterable(titles)
if titles:
# Normalize titles
titles = [x.lower() for x in titles]
for section in self:
if not self.filter_passes(types, section.type):
continue
if not self.filter_passes(keys, section.key):
continue
if not self.filter_passes(titles, section.title.lower()):
continue
yield section
|
Fix issue with "titles" in SectionContainer.filter()
|
Fix issue with "titles" in SectionContainer.filter()
|
Python
|
mit
|
fuzeman/plex.py
|
python
|
## Code Before:
from plex.core.helpers import to_iterable
from plex.objects.container import Container
from plex.objects.core.base import Property
from plex.objects.directory import Directory
class Section(Directory):
uuid = Property
filters = Property(type=bool)
refreshing = Property(type=bool)
agent = Property
scanner = Property
language = Property
created_at = Property('createdAt', int)
def __transform__(self):
self.path = '/library/sections/%s' % self.key
def all(self):
response = self.http.get('all')
return self.parse(response, {
'MediaContainer': ('MediaContainer', {
'Directory': {
'artist': 'Artist',
'show': 'Show'
},
'Video': {
'movie': 'Movie'
}
})
})
class SectionContainer(Container):
filter_passes = lambda _, allowed, value: allowed is None or value in allowed
def filter(self, types=None, keys=None, titles=None):
types = to_iterable(types)
keys = to_iterable(keys)
titles = [x.lower() for x in to_iterable(titles)]
for section in self:
if not self.filter_passes(types, section.type):
continue
if not self.filter_passes(keys, section.key):
continue
if not self.filter_passes(titles, section.title.lower()):
continue
yield section
## Instruction:
Fix issue with "titles" in SectionContainer.filter()
## Code After:
from plex.core.helpers import to_iterable
from plex.objects.container import Container
from plex.objects.core.base import Property
from plex.objects.directory import Directory
class Section(Directory):
    """A Plex library section (e.g. a movie, show or music library)."""

    # Attributes parsed from the section's XML element.
    uuid = Property
    filters = Property(type=bool)
    refreshing = Property(type=bool)

    agent = Property
    scanner = Property
    language = Property

    created_at = Property('createdAt', int)

    def __transform__(self):
        # Base request path for this section, derived from its key.
        self.path = '/library/sections/%s' % self.key

    def all(self):
        """Fetch every item in this section, parsed into typed objects.

        The mapping below associates XML element names (and their ``type``
        attribute values) with parser class names.
        """
        response = self.http.get('all')

        return self.parse(response, {
            'MediaContainer': ('MediaContainer', {
                'Directory': {
                    'artist': 'Artist',
                    'show': 'Show'
                },
                'Video': {
                    'movie': 'Movie'
                }
            })
        })
class SectionContainer(Container):
    """Container of library sections supporting simple attribute filtering."""

    # `allowed is None` means "no constraint"; otherwise a membership test.
    filter_passes = lambda _, allowed, value: allowed is None or value in allowed

    def filter(self, types=None, keys=None, titles=None):
        """Yield sections matching every given constraint (None = match any)."""
        types = to_iterable(types)
        keys = to_iterable(keys)
        titles = to_iterable(titles)

        if titles:
            # Titles are compared case-insensitively.
            titles = [title.lower() for title in titles]

        for section in self:
            # Short-circuits left to right, so the title is only lowered
            # once the type and key checks have passed.
            matches = (
                self.filter_passes(types, section.type)
                and self.filter_passes(keys, section.key)
                and self.filter_passes(titles, section.title.lower())
            )
            if matches:
                yield section
|
...
types = to_iterable(types)
keys = to_iterable(keys)
titles = to_iterable(titles)
if titles:
# Normalize titles
titles = [x.lower() for x in titles]
for section in self:
if not self.filter_passes(types, section.type):
...
|
f9ba5e64f73c3fa3fed62655c846fb4435d627cc
|
node/multi_var.py
|
node/multi_var.py
|
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
Fix multivar for nodes with variable lenght stacks
|
Fix multivar for nodes with variable lenght stacks
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
python
|
## Code Before:
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
## Instruction:
Fix multivar for nodes with variable lenght stacks
## Code After:
from nodes import Node
class MultiVar(Node):
    """Node that applies two sub-nodes to the same stack slice and
    concatenates their results (node_2's results first)."""

    char = "'"      # token that denotes this node in the source language
    args = 0        # recomputed in prepare() from the child nodes
    results = None
    contents = -1

    def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
        self.node_1 = node_1
        self.node_2 = node_2

    def prepare(self, stack):
        # Let each child compute its own argument count for this stack,
        # then claim enough arguments for whichever child needs more.
        self.node_1.prepare(stack)
        self.node_2.prepare(stack)
        self.args = max([self.node_1.args,self.node_2.args])

    @Node.is_func
    def apply(self, *stack):
        # Each child consumes only as many stack items as it declared.
        # NOTE(review): node_2's results precede node_1's in the returned
        # list -- confirm this ordering is intentional.
        rtn = self.node_2(stack[:self.node_2.args])
        rtn.extend(self.node_1(stack[:self.node_1.args]))
        return rtn
|
# ... existing code ...
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
# ... rest of the code ...
|
02da53951e48fd6b164d883cdf5c63c7b7f08049
|
rmake_plugins/multinode_client/nodetypes.py
|
rmake_plugins/multinode_client/nodetypes.py
|
import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
|
import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
|
Use metaclasses to register node types.
|
Use metaclasses to register node types.
|
Python
|
apache-2.0
|
fedora-conary/rmake-2,fedora-conary/rmake-2,fedora-conary/rmake-2,fedora-conary/rmake-2
|
python
|
## Code Before:
import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
class NodeType(object):
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
_nodeTypes = {}
def registerNodeTypes(moduleName):
global _nodeTypes
for item in sys.modules[moduleName].__dict__.values():
if inspect.isclass(item) and issubclass(item, NodeType):
_nodeTypes[item.nodeType] = item
registerNodeTypes(__name__)
def registerNodeType(class_):
_nodeTypes[class_.nodeType] = class_
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
## Instruction:
Use metaclasses to register node types.
## Code After:
import inspect
import sys
import types
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
def freeze(self):
return (self.nodeType, self.__dict__)
@classmethod
def thaw(class_, d):
return class_(**d)
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
|
...
from rmake.lib.apiutils import thaw, freeze
_nodeTypes = {}
class _NodeTypeRegistrar(type):
def __init__(self, name, bases, dict):
type.__init__(self, name, bases, dict)
_nodeTypes[self.nodeType] = self
class NodeType(object):
__metaclass__ = _NodeTypeRegistrar
nodeType = 'UNKNOWN'
def __init__(self):
pass
...
class Client(NodeType):
nodeType = 'CLIENT'
def thawNodeType(info):
nodeType = info[0]
return _nodeTypes[nodeType].thaw(info[1])
...
|
aa5bc77e78e82fbe63acf2fd8f6764a420f2e4e8
|
simuvex/procedures/stubs/caller.py
|
simuvex/procedures/stubs/caller.py
|
import simuvex
######################################
# Caller
######################################
class Caller(simuvex.SimProcedure):
"""
Caller stub. Creates a Ijk_Call exit to the specified function
"""
def run(self, target_addr=None):
self.call(target_addr, [ ], self.after_call)
def after_call(self):
pass
|
import simuvex
######################################
# Caller
######################################
class Caller(simuvex.SimProcedure):
"""
Caller stub. Creates a Ijk_Call exit to the specified function
"""
NO_RET = True
def run(self, target_addr=None):
self.call(target_addr, [ ], self.after_call)
def after_call(self):
pass
|
Make sure Caller does not return
|
Make sure Caller does not return
|
Python
|
bsd-2-clause
|
axt/angr,chubbymaggie/angr,iamahuman/angr,tyb0807/angr,tyb0807/angr,chubbymaggie/angr,schieb/angr,iamahuman/angr,schieb/angr,angr/angr,angr/simuvex,iamahuman/angr,chubbymaggie/angr,axt/angr,angr/angr,chubbymaggie/simuvex,tyb0807/angr,angr/angr,f-prettyland/angr,f-prettyland/angr,axt/angr,schieb/angr,chubbymaggie/simuvex,chubbymaggie/simuvex,f-prettyland/angr
|
python
|
## Code Before:
import simuvex
######################################
# Caller
######################################
class Caller(simuvex.SimProcedure):
"""
Caller stub. Creates a Ijk_Call exit to the specified function
"""
def run(self, target_addr=None):
self.call(target_addr, [ ], self.after_call)
def after_call(self):
pass
## Instruction:
Make sure Caller does not return
## Code After:
import simuvex
######################################
# Caller
######################################
class Caller(simuvex.SimProcedure):
"""
Caller stub. Creates a Ijk_Call exit to the specified function
"""
NO_RET = True
def run(self, target_addr=None):
self.call(target_addr, [ ], self.after_call)
def after_call(self):
pass
|
...
Caller stub. Creates a Ijk_Call exit to the specified function
"""
NO_RET = True
def run(self, target_addr=None):
self.call(target_addr, [ ], self.after_call)
...
|
4612f10a8d4dcd0ec7133b12411387c74becbdb7
|
tests/__init__.py
|
tests/__init__.py
|
import sublime
import os
import os.path
import unittest
class CommandTestCase(unittest.TestCase):
def setUp(self):
self.project_data = {
'code_search': {'csearchindex': 'test_csearchindex'},
'folders': [{'path': '.'}]}
sublime.active_window().run_command('new_window')
self.window = sublime.active_window()
self.window.set_project_data(self.project_data)
self.view = self.window.new_file()
def tearDown(self):
self.view.set_scratch(True)
self.window.focus_view(self.view)
self.window.run_command('close_file')
self.window.run_command('close_window')
if os.path.isfile('test_csearchindex'):
os.remove('test_csearchindex')
|
import sublime
import os
import os.path
import unittest
class CommandTestCase(unittest.TestCase):
def setUp(self):
path = '{0}/YetAnotherCodeSearch'.format(sublime.packages_path())
self.project_data = {
'code_search': {'csearchindex': 'test_csearchindex'},
'folders': [{'path': path}]}
sublime.active_window().run_command('new_window')
self.window = sublime.active_window()
self.window.set_project_data(self.project_data)
self.view = self.window.new_file()
def tearDown(self):
self.view.set_scratch(True)
self.window.focus_view(self.view)
self.window.run_command('close_file')
self.window.run_command('close_window')
if os.path.isfile('test_csearchindex'):
os.remove('test_csearchindex')
|
Set the test path to the project in Packages.
|
Set the test path to the project in Packages.
|
Python
|
mit
|
pope/SublimeYetAnotherCodeSearch,pope/SublimeYetAnotherCodeSearch
|
python
|
## Code Before:
import sublime
import os
import os.path
import unittest
class CommandTestCase(unittest.TestCase):
def setUp(self):
self.project_data = {
'code_search': {'csearchindex': 'test_csearchindex'},
'folders': [{'path': '.'}]}
sublime.active_window().run_command('new_window')
self.window = sublime.active_window()
self.window.set_project_data(self.project_data)
self.view = self.window.new_file()
def tearDown(self):
self.view.set_scratch(True)
self.window.focus_view(self.view)
self.window.run_command('close_file')
self.window.run_command('close_window')
if os.path.isfile('test_csearchindex'):
os.remove('test_csearchindex')
## Instruction:
Set the test path to the project in Packages.
## Code After:
import sublime
import os
import os.path
import unittest
class CommandTestCase(unittest.TestCase):
def setUp(self):
path = '{0}/YetAnotherCodeSearch'.format(sublime.packages_path())
self.project_data = {
'code_search': {'csearchindex': 'test_csearchindex'},
'folders': [{'path': path}]}
sublime.active_window().run_command('new_window')
self.window = sublime.active_window()
self.window.set_project_data(self.project_data)
self.view = self.window.new_file()
def tearDown(self):
self.view.set_scratch(True)
self.window.focus_view(self.view)
self.window.run_command('close_file')
self.window.run_command('close_window')
if os.path.isfile('test_csearchindex'):
os.remove('test_csearchindex')
|
...
class CommandTestCase(unittest.TestCase):
def setUp(self):
path = '{0}/YetAnotherCodeSearch'.format(sublime.packages_path())
self.project_data = {
'code_search': {'csearchindex': 'test_csearchindex'},
'folders': [{'path': path}]}
sublime.active_window().run_command('new_window')
self.window = sublime.active_window()
self.window.set_project_data(self.project_data)
...
|
784e657183e783a2db4c42e18d1e2224e408b052
|
src/botansqlite3/botansqlite3.c
|
src/botansqlite3/botansqlite3.c
|
/*
* (C) 2016 Daniel Seither (Kullo GmbH)
*
* Distributed under the terms of the Botan license
*/
#define SQLITE_HAS_CODEC 1
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-align"
#pragma clang diagnostic ignored "-Wcast-qual"
#pragma clang diagnostic ignored "-Wconversion"
#pragma clang diagnostic ignored "-Wdisabled-macro-expansion"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#pragma clang diagnostic ignored "-Wfloat-equal"
#pragma clang diagnostic ignored "-Wmissing-noreturn"
#pragma clang diagnostic ignored "-Wmissing-prototypes"
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
#pragma clang diagnostic ignored "-Wparentheses-equality"
#pragma clang diagnostic ignored "-Wreserved-id-macro"
#pragma clang diagnostic ignored "-Wsign-compare"
#pragma clang diagnostic ignored "-Wsign-conversion"
#pragma clang diagnostic ignored "-Wundef"
#pragma clang diagnostic ignored "-Wunreachable-code"
#pragma clang diagnostic ignored "-Wunreachable-code-break"
#pragma clang diagnostic ignored "-Wunused-macros"
#pragma clang diagnostic ignored "-Wunused-function"
#pragma clang diagnostic ignored "-Wunused-value"
#include "../sqlite3.c"
#pragma clang diagnostic pop
#include "codecext.c"
|
/*
* (C) 2016 Daniel Seither (Kullo GmbH)
*
* Distributed under the terms of the Botan license
*/
#define SQLITE_HAS_CODEC 1
#if defined __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-align"
#pragma clang diagnostic ignored "-Wcast-qual"
#pragma clang diagnostic ignored "-Wconversion"
#pragma clang diagnostic ignored "-Wdisabled-macro-expansion"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#pragma clang diagnostic ignored "-Wfloat-equal"
#pragma clang diagnostic ignored "-Wmissing-noreturn"
#pragma clang diagnostic ignored "-Wmissing-prototypes"
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
#pragma clang diagnostic ignored "-Wparentheses-equality"
#pragma clang diagnostic ignored "-Wreserved-id-macro"
#pragma clang diagnostic ignored "-Wsign-compare"
#pragma clang diagnostic ignored "-Wsign-conversion"
#pragma clang diagnostic ignored "-Wundef"
#pragma clang diagnostic ignored "-Wunreachable-code"
#pragma clang diagnostic ignored "-Wunreachable-code-break"
#pragma clang diagnostic ignored "-Wunused-macros"
#pragma clang diagnostic ignored "-Wunused-function"
#pragma clang diagnostic ignored "-Wunused-value"
#endif
#include "../sqlite3.c"
#if defined __clang__
#pragma clang diagnostic pop
#endif
#include "codecext.c"
|
Fix unknown pragma warnings on MSVC
|
Fix unknown pragma warnings on MSVC
|
C
|
bsd-3-clause
|
kullo/smartsqlite,kullo/smartsqlite,kullo/smartsqlite
|
c
|
## Code Before:
/*
* (C) 2016 Daniel Seither (Kullo GmbH)
*
* Distributed under the terms of the Botan license
*/
#define SQLITE_HAS_CODEC 1
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-align"
#pragma clang diagnostic ignored "-Wcast-qual"
#pragma clang diagnostic ignored "-Wconversion"
#pragma clang diagnostic ignored "-Wdisabled-macro-expansion"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#pragma clang diagnostic ignored "-Wfloat-equal"
#pragma clang diagnostic ignored "-Wmissing-noreturn"
#pragma clang diagnostic ignored "-Wmissing-prototypes"
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
#pragma clang diagnostic ignored "-Wparentheses-equality"
#pragma clang diagnostic ignored "-Wreserved-id-macro"
#pragma clang diagnostic ignored "-Wsign-compare"
#pragma clang diagnostic ignored "-Wsign-conversion"
#pragma clang diagnostic ignored "-Wundef"
#pragma clang diagnostic ignored "-Wunreachable-code"
#pragma clang diagnostic ignored "-Wunreachable-code-break"
#pragma clang diagnostic ignored "-Wunused-macros"
#pragma clang diagnostic ignored "-Wunused-function"
#pragma clang diagnostic ignored "-Wunused-value"
#include "../sqlite3.c"
#pragma clang diagnostic pop
#include "codecext.c"
## Instruction:
Fix unknown pragma warnings on MSVC
## Code After:
/*
* (C) 2016 Daniel Seither (Kullo GmbH)
*
* Distributed under the terms of the Botan license
*/
#define SQLITE_HAS_CODEC 1
#if defined __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-align"
#pragma clang diagnostic ignored "-Wcast-qual"
#pragma clang diagnostic ignored "-Wconversion"
#pragma clang diagnostic ignored "-Wdisabled-macro-expansion"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#pragma clang diagnostic ignored "-Wfloat-equal"
#pragma clang diagnostic ignored "-Wmissing-noreturn"
#pragma clang diagnostic ignored "-Wmissing-prototypes"
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
#pragma clang diagnostic ignored "-Wparentheses-equality"
#pragma clang diagnostic ignored "-Wreserved-id-macro"
#pragma clang diagnostic ignored "-Wsign-compare"
#pragma clang diagnostic ignored "-Wsign-conversion"
#pragma clang diagnostic ignored "-Wundef"
#pragma clang diagnostic ignored "-Wunreachable-code"
#pragma clang diagnostic ignored "-Wunreachable-code-break"
#pragma clang diagnostic ignored "-Wunused-macros"
#pragma clang diagnostic ignored "-Wunused-function"
#pragma clang diagnostic ignored "-Wunused-value"
#endif
#include "../sqlite3.c"
#if defined __clang__
#pragma clang diagnostic pop
#endif
#include "codecext.c"
|
...
#define SQLITE_HAS_CODEC 1
#if defined __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-align"
#pragma clang diagnostic ignored "-Wcast-qual"
#pragma clang diagnostic ignored "-Wconversion"
#pragma clang diagnostic ignored "-Wdisabled-macro-expansion"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#pragma clang diagnostic ignored "-Wfloat-equal"
#pragma clang diagnostic ignored "-Wmissing-noreturn"
#pragma clang diagnostic ignored "-Wmissing-prototypes"
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
#pragma clang diagnostic ignored "-Wparentheses-equality"
#pragma clang diagnostic ignored "-Wreserved-id-macro"
#pragma clang diagnostic ignored "-Wsign-compare"
#pragma clang diagnostic ignored "-Wsign-conversion"
#pragma clang diagnostic ignored "-Wundef"
#pragma clang diagnostic ignored "-Wunreachable-code"
#pragma clang diagnostic ignored "-Wunreachable-code-break"
#pragma clang diagnostic ignored "-Wunused-macros"
#pragma clang diagnostic ignored "-Wunused-function"
#pragma clang diagnostic ignored "-Wunused-value"
#endif
#include "../sqlite3.c"
#if defined __clang__
#pragma clang diagnostic pop
#endif
#include "codecext.c"
...
|
0e36a49d6a53f87cbe71fd5ec9dce524dd638122
|
fireplace/deck.py
|
fireplace/deck.py
|
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList
class Deck(CardList):
MAX_CARDS = 30
MAX_UNIQUE_CARDS = 2
MAX_UNIQUE_LEGENDARIES = 1
@classmethod
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero, name=None):
super().__init__(cards)
self.hero = hero
if name is None:
name = "Custom %s" % (hero)
self.name = name
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __str__(self):
return self.name
def __repr__(self):
return "<%s (%i cards)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
random.shuffle(self)
|
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList
class Deck(CardList):
MAX_CARDS = 30
MAX_UNIQUE_CARDS = 2
MAX_UNIQUE_LEGENDARIES = 1
@classmethod
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero):
super().__init__(cards)
self.hero = hero
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __repr__(self):
return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
random.shuffle(self)
|
Drop support for naming Deck objects
|
Drop support for naming Deck objects
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,Meerkov/fireplace,amw2104/fireplace,Ragowit/fireplace,beheh/fireplace,butozerca/fireplace,Ragowit/fireplace,amw2104/fireplace,liujimj/fireplace,smallnamespace/fireplace,jleclanche/fireplace,oftc-ftw/fireplace,oftc-ftw/fireplace,butozerca/fireplace,NightKev/fireplace,Meerkov/fireplace,liujimj/fireplace
|
python
|
## Code Before:
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList
class Deck(CardList):
MAX_CARDS = 30
MAX_UNIQUE_CARDS = 2
MAX_UNIQUE_LEGENDARIES = 1
@classmethod
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero, name=None):
super().__init__(cards)
self.hero = hero
if name is None:
name = "Custom %s" % (hero)
self.name = name
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __str__(self):
return self.name
def __repr__(self):
return "<%s (%i cards)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
random.shuffle(self)
## Instruction:
Drop support for naming Deck objects
## Code After:
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList
class Deck(CardList):
MAX_CARDS = 30
MAX_UNIQUE_CARDS = 2
MAX_UNIQUE_LEGENDARIES = 1
@classmethod
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero):
super().__init__(cards)
self.hero = hero
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __repr__(self):
return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
random.shuffle(self)
|
// ... existing code ...
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero):
super().__init__(cards)
self.hero = hero
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __repr__(self):
return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
// ... rest of the code ...
|
2908999b320714a97fe5223d0f51237554392e48
|
framework/Source/GPUImageFilterPipeline.h
|
framework/Source/GPUImageFilterPipeline.h
|
@interface GPUImageFilterPipeline : NSObject
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
@end
|
@interface GPUImageFilterPipeline : NSObject
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
- (CGImageRef) newCGImageFromCurrentFilteredFrame;
@end
|
Make pipeline work for CGImage processing (missed out the header file)
|
Bugfix: Make pipeline work for CGImage processing (missed out the header file)
|
C
|
bsd-3-clause
|
HSFGitHub/GPUImage,stelabouras/GPUImage,Creolophus/GPUImage,Kevin775263419/GPUImage,jimydotorg/GGGPUImage,StratAguilar/GPUImage,tuo/GPUImage,3drobotics/GPUImage,yshiga/GPUImage,geowarsong/GPUImage,Matzo/GPUImage,rromanchuk/GPUImage,hgl888/GPUImage,mingming1986/GPUImage,BradLarson/GPUImage,ozetadev/GPUImage,powerhome/GPUImage,tuo/GPUImage,lauraskelton/GPUImage,drewwilson/GPUImage,AmiaoCat/GPUImage,BradLarson/GPUImage,njucsyyh/GPUImage,BlessNeo/GPUImage,u-engine/GPUImage,liduanw/GPUImage,dachaoisme/GPUImage,Naithar/GPUImage,alex-learn/GPUImage,evfemist/GPUImage,cesare-montresor/GPUImage,cnbin/GPUImage,hgl888/GPUImage,powerhome/GPUImage,lydonchandra/GPUImage,agan112/GPUImage,LIQC/GPUImage,jimydotorg/GGGPUImage,jsharp83/GPUImage,msfeldstein/GPUImage,kalsariyac/GPUImage,hanton/GPUImage,stelabouras/GPUImage,cesare-montresor/GPUImage,wfxiang08/GPUImage,silvansky/GPUImage,lauraskelton/GPUImage,AskaCai/GPUImage,dachaoisme/GPUImage,PlanetaToBe/GPUImage,efremidze/GPUImage,hanangellove/GPUImage,sujeking/GPUImage,0x4d52/GPUImage,BlessNeo/GPUImage,lily4ever/GPUImage,zhengfuzhe2013/GPUImage,xingyuniu/GPUImage,KBvsMJ/GPUImage,cocologics/GPUImage,YuAo/GPUImage,hanangellove/GPUImage,liduanw/GPUImage,jianwoo/GPUImage,Slin/GPUImage,pheed/GPUImage,SinnerSchraderMobileMirrors/GPUImage,rtsbtx/GPUImage,mattwymore/GPUImage,zzhhui/GPUImage,sujeking/GPUImage,kalsariyac/GPUImage,alex-learn/GPUImage,birthmark/GPUImage,Dexterp37/GPUImage,3drobotics/GPUImage,jianwoo/GPUImage,howandhao/GPUImage,sdonly/GPUImage,0x4d52/GPUImage,FiftyThree/GPUImage,rtsbtx/GPUImage,mervekeles/repo,lydonchandra/GPUImage,wangwei1237/GPUImage,evfemist/GPUImage,ohad7/GPUImage,picmonkey/GPUImage,BradLarson/GPUImage,pheed/GPUImage,xqz001/GPUImage,pengleelove/GPUImage,TimorCan/GPUImage,lacyrhoades/GPUImage,SanChain/GPUImage,zzhhui/GPUImage,SanChain/GPUImage,wangwei1237/GPUImage,powerhome/GPUImage,AlanJN/GPUImage,wfxiang08/GPUImage,pheed/GPUImage,hgl888/GPUImage,wysaid/GPUImage,appone/GPUImage,lacyrhoades/GPUImage,duanhjlt/GP
UImage,duanhjlt/GPUImage,sansuiso/GPUImage,zilaiyedaren/GPUImage,mervekeles/GPUImage,TuSDK/GPUImage,ashishgabani/GPUImage,appone/GPUImage,wjszf/GPUImage,sansuiso/GPUImage,hoanganhx86/GPUImage,AlanJN/GPUImage,pcKanso/GPUImage,njucsyyh/GPUImage,ozetadev/GPUImage,bragil-massoud/GPUImage,jakeva/GPUImage,drewwilson/GPUImage,devluyy/GPUImage,nickplee/GPUImage,devluyy/GPUImage,KBvsMJ/GPUImage,pheed/GPUImage,Slin/GPUImage,Matzo/GPUImage,jackeychens/GPUImage,shi-yan/GPUImage,hanangellove/GPUImage,cocologics/GPUImage,denivip/GPUImage,thinkrad/GPUImage,zhengfuzhe2013/GPUImage,geowarsong/GPUImage,jackeychens/GPUImage,haikusw/GPUImage,xingyuniu/GPUImage,SuPair/GPUImage,yaoxiaoyong/GPUImage,mumer92/GPUImage,inb4ohnoes/GPUImage,zakinaeem/GPUImage,Kevin775263419/GPUImage,eighteight/GPUImage,FiftyThree/GPUImage,nonoBruce/GPUImage,937447974/GPUImage,jackeychens/GPUImage,shi-yan/GPUImage,ashishgabani/GPUImage,ask-fm/GPUImage,rocklive/GPUImage,ramoslin02/GPUImage,RavishankarDuMCA10/GPUImage,YuAo/GPUImage,loi32456/GPUImage,jimydotorg/GGGPUImage,mervekeles/repo,hakanw/GPUImage,sprint84/GPUImage,mattwymore/GPUImage,levyleo/LGPUImage,Learn-IOS/GPUImage,zakinaeem/GPUImage,drewwilson/GPUImage,mumer92/GPUImage,DavidWangTM/GPUImage,somegeekintn/GPUImage,java02014/GPUImage,yimouleng/GPUImage,mad102190/GPUImage,stelabouras/GPUImage,hakanw/GPUImage,headupinclouds/GPUImage,skonb/GPUImage,Slin/GPUImage,eighteight/GPUImage,FiftyThree/GPUImage,rFlex/GPUImage,lily4ever/GPUImage,YuAo/GPUImage,zhengfuzhe2013/GPUImage,AmiaoCat/GPUImage,horaceho/GPUImage,cocologics/GPUImage,DepositDev/GPUImage,AskaCai/GPUImage,Lcarvajal/GPUImage,SanjoDeundiak/GPUImage,java02014/GPUImage,silvansky/GPUImage,DavidWangTM/GPUImage,937447974/GPUImage,stelabouras/GPUImage,zakinaeem/GPUImage,sansuiso/GPUImage,faceleg/GPUImage,ohad7/GPUImage,111minutes/GPUImage,nickplee/GPUImage,tastyone/GPUImage,liduanw/GPUImage,kalsariyac/GPUImage,faceleg/GPUImage,cookov/GPUImage,gank0326/GPUImage,yshiga/GPUImage,UltravisualApp/GPUImage,odyth/GP
UImage,SuPair/GPUImage,gank0326/GPUImage,mervekeles/GPUImage,jsharp83/GPUImage,mad102190/GPUImage,wysaid/GPUImage,jianwoo/GPUImage,StratAguilar/GPUImage,hyperconnect/GPUImage,jakeva/GPUImage,RavishankarDuMCA10/GPUImage,mtxs007/GPUImage,cocologics/GPUImage,ask-fm/GPUImage,jsharp83/GPUImage,pengleelove/GPUImage,wjszf/GPUImage,bragil-massoud/GPUImage,rromanchuk/GPUImage,csjlengxiang/GPUImage,lauraskelton/GPUImage,SanjoDeundiak/GPUImage,PlanetaToBe/GPUImage,hoanganhx86/GPUImage,mumer92/GPUImage,howandhao/GPUImage,hanton/GPUImage,efremidze/GPUImage,loi32456/GPUImage,SanChain/GPUImage,cookov/GPUImage,bgulanowski/GPUImage,mtxs007/GPUImage,Amnysia/GPUImage,bgulanowski/GPUImage,nickplee/GPUImage,UndaApp/GPUImage,r3mus/GPUImage,msfeldstein/GPUImage,mingming1986/GPUImage,pengleelove/GPUImage,birthmark/GPUImage,Matzo/GPUImage,tastyone/GPUImage,picmonkey/GPUImage,SinnerSchraderMobileMirrors/GPUImage,ramoslin02/GPUImage,dawangjiaowolaixunshan/GPUImage,j364960953/GPUImage,hakanw/GPUImage,pevasquez/GPUImage,SinnerSchraderMobileMirrors/GPUImage,silvansky/GPUImage,BlessNeo/GPUImage,wjszf/GPUImage,FiftyThree/GPUImage,Learn-IOS/GPUImage,skonb/GPUImage,cesare-montresor/GPUImage,dawangjiaowolaixunshan/GPUImage,Lcarvajal/GPUImage,smule/GPUImage,pcKanso/GPUImage,sprint84/GPUImage,sprint84/GPUImage,denivip/GPUImage,IncredibleDucky/GPUImage,r3mus/GPUImage,yaoxiaoyong/GPUImage,hyperconnect/GPUImage,TuSDK/GPUImage,eunmin/GPUImage,3drobotics/GPUImage,Hybridity/GPUImage,tastyone/GPUImage,catbus/GPUImage,inb4ohnoes/GPUImage,pevasquez/GPUImage,appone/GPUImage,agan112/GPUImage,headupinclouds/GPUImage,levyleo/LGPUImage,eighteight/GPUImage,DavidWangTM/GPUImage,thinkrad/GPUImage,PodRepo/GPUImage,Naithar/GPUImage,odyth/GPUImage,SinnerSchraderMobileMirrors/GPUImage,tuo/GPUImage,rromanchuk/GPUImage,appone/GPUImage,ohad7/GPUImage,njucsyyh/GPUImage,somegeekintn/GPUImage,0x4d52/GPUImage,hoanganhx86/GPUImage,xingyuniu/GPUImage,Creolophus/GPUImage,headupinclouds/GPUImage,cesare-montresor/GPUImage,sdonly/GPUIm
age,UltravisualApp/GPUImage,zilaiyedaren/GPUImage,Creolophus/GPUImage,catbus/GPUImage,jimydotorg/GGGPUImage,HSFGitHub/GPUImage,u-engine/GPUImage,horaceho/GPUImage,yimouleng/GPUImage,howandhao/GPUImage,LIQC/GPUImage,bragil-massoud/GPUImage,csjlengxiang/GPUImage,u-engine/GPUImage,KBvsMJ/GPUImage,j364960953/GPUImage,IncredibleDucky/GPUImage,lauraskelton/GPUImage,Hybridity/GPUImage,DepositDev/GPUImage,lauraskelton/GPUImage,mad102190/GPUImage,agan112/GPUImage,picmonkey/GPUImage,TimorCan/GPUImage,Learn-IOS/GPUImage,eunmin/GPUImage,faceleg/GPUImage,rocklive/GPUImage,skonb/GPUImage,SanjoDeundiak/GPUImage,odyth/GPUImage,haikusw/GPUImage,cnbin/GPUImage,nonoBruce/GPUImage,gank0326/GPUImage,IncredibleDucky/GPUImage,Amnysia/GPUImage,smule/GPUImage,xqz001/GPUImage,Dexterp37/GPUImage,rtsbtx/GPUImage,PodRepo/GPUImage,UndaApp/GPUImage,wysaid/GPUImage,bgulanowski/GPUImage
|
c
|
## Code Before:
@interface GPUImageFilterPipeline : NSObject
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
@end
## Instruction:
Bugfix: Make pipeline work for CGImage processing (missed out the header file)
## Code After:
@interface GPUImageFilterPipeline : NSObject
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (void) addFilter:(GPUImageFilter*)filter;
- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter;
- (void) replaceAllFilters:(NSArray*) newFilters;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
- (CGImageRef) newCGImageFromCurrentFilteredFrame;
@end
|
// ... existing code ...
- (void) removeAllFilters;
- (UIImage *) currentFilteredFrame;
- (CGImageRef) newCGImageFromCurrentFilteredFrame;
@end
// ... rest of the code ...
|
05497eb3970d943008d265a498aafa24e311014b
|
android/java/com/sometrik/framework/FWPicker.java
|
android/java/com/sometrik/framework/FWPicker.java
|
package com.sometrik.framework;
import java.util.ArrayList;
import java.util.TreeMap;
import android.content.Context;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeMessageHandler {
ArrayAdapter<String> adapter;
ArrayList<Integer> numberList;
TreeMap<Integer, String> valueMap;
public FWPicker(Context context) {
super(context);
adapter = new ArrayAdapter<String>(context, android.R.layout.simple_spinner_item);
valueMap = new TreeMap<Integer, String>();
numberList = new ArrayList<Integer>();
}
@Override
public void handleMessage(NativeMessage message) {
switch (message.getMessage()) {
case ADD_OPTION:
valueMap.put(message.getValue(), message.getTextValue());
numberList.add(message.getValue());
for (int i = 0; i < numberList.size(); i++) {
adapter.add(valueMap.get(numberList.get(i)));
}
setAdapter(adapter);
break;
default:
System.out.println("Message couldn't be handled by Picker");
break;
}
}
@Override
public void showView() {
}
@Override
public int getElementId() {
return getId();
}
}
|
package com.sometrik.framework;
import java.util.ArrayList;
import java.util.TreeMap;
import android.content.Context;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeMessageHandler {
private ArrayAdapter<String> adapter;
private ArrayList<Integer> numberList;
private TreeMap<Integer, String> valueMap;
private native void pickerOptionSelected(int id);
public FWPicker(Context context) {
super(context);
adapter = new ArrayAdapter<String>(context, android.R.layout.simple_spinner_item);
valueMap = new TreeMap<Integer, String>();
numberList = new ArrayList<Integer>();
setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int position, long arg3) {
pickerOptionSelected(position);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
@Override
public void handleMessage(NativeMessage message) {
switch (message.getMessage()) {
case ADD_OPTION:
valueMap.put(message.getValue(), message.getTextValue());
numberList.add(message.getValue());
for (int i = 0; i < numberList.size(); i++) {
adapter.add(valueMap.get(numberList.get(i)));
}
setAdapter(adapter);
break;
default:
System.out.println("Message couldn't be handled by Picker");
break;
}
}
@Override
public void showView() {
}
@Override
public int getElementId() {
return getId();
}
}
|
Add onSelectedListener and native function call to picker
|
Add onSelectedListener and native function call to picker
|
Java
|
mit
|
Sometrik/framework,Sometrik/framework,Sometrik/framework
|
java
|
## Code Before:
package com.sometrik.framework;
import java.util.ArrayList;
import java.util.TreeMap;
import android.content.Context;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeMessageHandler {
ArrayAdapter<String> adapter;
ArrayList<Integer> numberList;
TreeMap<Integer, String> valueMap;
public FWPicker(Context context) {
super(context);
adapter = new ArrayAdapter<String>(context, android.R.layout.simple_spinner_item);
valueMap = new TreeMap<Integer, String>();
numberList = new ArrayList<Integer>();
}
@Override
public void handleMessage(NativeMessage message) {
switch (message.getMessage()) {
case ADD_OPTION:
valueMap.put(message.getValue(), message.getTextValue());
numberList.add(message.getValue());
for (int i = 0; i < numberList.size(); i++) {
adapter.add(valueMap.get(numberList.get(i)));
}
setAdapter(adapter);
break;
default:
System.out.println("Message couldn't be handled by Picker");
break;
}
}
@Override
public void showView() {
}
@Override
public int getElementId() {
return getId();
}
}
## Instruction:
Add onSelectedListener and native function call to picker
## Code After:
package com.sometrik.framework;
import java.util.ArrayList;
import java.util.TreeMap;
import android.content.Context;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeMessageHandler {
private ArrayAdapter<String> adapter;
private ArrayList<Integer> numberList;
private TreeMap<Integer, String> valueMap;
private native void pickerOptionSelected(int id);
public FWPicker(Context context) {
super(context);
adapter = new ArrayAdapter<String>(context, android.R.layout.simple_spinner_item);
valueMap = new TreeMap<Integer, String>();
numberList = new ArrayList<Integer>();
setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int position, long arg3) {
pickerOptionSelected(position);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
@Override
public void handleMessage(NativeMessage message) {
switch (message.getMessage()) {
case ADD_OPTION:
valueMap.put(message.getValue(), message.getTextValue());
numberList.add(message.getValue());
for (int i = 0; i < numberList.size(); i++) {
adapter.add(valueMap.get(numberList.get(i)));
}
setAdapter(adapter);
break;
default:
System.out.println("Message couldn't be handled by Picker");
break;
}
}
@Override
public void showView() {
}
@Override
public int getElementId() {
return getId();
}
}
|
# ... existing code ...
import java.util.TreeMap;
import android.content.Context;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
public class FWPicker extends Spinner implements NativeMessageHandler {
private ArrayAdapter<String> adapter;
private ArrayList<Integer> numberList;
private TreeMap<Integer, String> valueMap;
private native void pickerOptionSelected(int id);
public FWPicker(Context context) {
super(context);
# ... modified code ...
adapter = new ArrayAdapter<String>(context, android.R.layout.simple_spinner_item);
valueMap = new TreeMap<Integer, String>();
numberList = new ArrayList<Integer>();
setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int position, long arg3) {
pickerOptionSelected(position);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
@Override
# ... rest of the code ...
|
51f6272870e4e72d2364b2c2f660457b5c9286ef
|
doc/sample_code/search_forking_pro.py
|
doc/sample_code/search_forking_pro.py
|
import os
import sys
sys.path.append('./../../')
from pyogi.ki2converter import *
from pyogi.kifu import *
if __name__ == '__main__':
for n in range(0, 50000):
n1 = (n // 10000)
n2 = int(n < 10000)
relpath = '~/data/shogi/2chkifu/{0}000{1}/{2:0>5}.KI2'.format(n1, n2, n)
kifile = os.path.expanduser(relpath)
if not os.path.exists(kifile):
continue
ki2converter = Ki2converter()
ki2converter.from_path(kifile)
csa = ki2converter.to_csa()
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
|
import os
import sys
import pandas as pd
sys.path.append('./../../')
from pyogi.ki2converter import *
from pyogi.kifu import *
if __name__ == '__main__':
res_table = []
for n in range(0, 50000):
n1 = (n // 10000)
n2 = int(n < 10000)
relpath = '~/data/shogi/2chkifu/{0}000{1}/{2:0>5}.KI2'.format(n1, n2, n)
kifile = os.path.expanduser(relpath)
if not os.path.exists(kifile):
continue
ki2converter = Ki2converter()
ki2converter.from_path(kifile)
csa = ki2converter.to_csa()
if not csa:
continue
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
# Output
# 1. sente forked | gote forked
# 2. (sente won & sente forked) | (gote won & gote forked)
res_table.append(
[res[2] != [] or res[3] != [],
(kifu.sente_win and res[2]!=[]) or
((not kifu.sente_win) and res[3]!=[])])
df = pd.DataFrame(res_table, columns=['fork', 'fork&win'])
pd.crosstab(df.loc[:, 'fork'], df.loc[:, 'fork&win'])
|
Add sum up part using pd.crosstab
|
Add sum up part using pd.crosstab
|
Python
|
mit
|
tosh1ki/pyogi,tosh1ki/pyogi
|
python
|
## Code Before:
import os
import sys
sys.path.append('./../../')
from pyogi.ki2converter import *
from pyogi.kifu import *
if __name__ == '__main__':
for n in range(0, 50000):
n1 = (n // 10000)
n2 = int(n < 10000)
relpath = '~/data/shogi/2chkifu/{0}000{1}/{2:0>5}.KI2'.format(n1, n2, n)
kifile = os.path.expanduser(relpath)
if not os.path.exists(kifile):
continue
ki2converter = Ki2converter()
ki2converter.from_path(kifile)
csa = ki2converter.to_csa()
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
## Instruction:
Add sum up part using pd.crosstab
## Code After:
import os
import sys
import pandas as pd
sys.path.append('./../../')
from pyogi.ki2converter import *
from pyogi.kifu import *
if __name__ == '__main__':
res_table = []
for n in range(0, 50000):
n1 = (n // 10000)
n2 = int(n < 10000)
relpath = '~/data/shogi/2chkifu/{0}000{1}/{2:0>5}.KI2'.format(n1, n2, n)
kifile = os.path.expanduser(relpath)
if not os.path.exists(kifile):
continue
ki2converter = Ki2converter()
ki2converter.from_path(kifile)
csa = ki2converter.to_csa()
if not csa:
continue
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
# Output
# 1. sente forked | gote forked
# 2. (sente won & sente forked) | (gote won & gote forked)
res_table.append(
[res[2] != [] or res[3] != [],
(kifu.sente_win and res[2]!=[]) or
((not kifu.sente_win) and res[3]!=[])])
df = pd.DataFrame(res_table, columns=['fork', 'fork&win'])
pd.crosstab(df.loc[:, 'fork'], df.loc[:, 'fork&win'])
|
...
import os
import sys
import pandas as pd
sys.path.append('./../../')
from pyogi.ki2converter import *
...
if __name__ == '__main__':
res_table = []
for n in range(0, 50000):
...
csa = ki2converter.to_csa()
if not csa:
continue
kifu = Kifu(csa)
res = kifu.get_forking(['OU', 'HI'])
if res[2] or res[3]:
print(kifu.players)
# Output
# 1. sente forked | gote forked
# 2. (sente won & sente forked) | (gote won & gote forked)
res_table.append(
[res[2] != [] or res[3] != [],
(kifu.sente_win and res[2]!=[]) or
((not kifu.sente_win) and res[3]!=[])])
df = pd.DataFrame(res_table, columns=['fork', 'fork&win'])
pd.crosstab(df.loc[:, 'fork'], df.loc[:, 'fork&win'])
...
|
fc609dd987593d58cddec3af8865a1d3a456fb43
|
modules/expansion/dns.py
|
modules/expansion/dns.py
|
import json
import dns.resolver
mispattributes = {'input':['hostname', 'domain'], 'output':['ip-src', 'ip-dst']}
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if request.get('hostname'):
toquery = request['hostname']
elif request.get('domain'):
toquery = request['domain']
else:
return False
r = dns.resolver.Resolver()
r.nameservers = ['8.8.8.8']
try:
answer = r.query(toquery, 'A')
except dns.resolver.NXDOMAIN:
return False
except dns.exception.Timeout:
return False
r = {'results':[{'types':mispattributes['output'], 'values':[str(answer[0])]}]}
return r
def introspection():
return mispattributes['input']
|
import json
import dns.resolver
mispattributes = {'input':['hostname', 'domain'], 'output':['ip-src', 'ip-dst']}
moduleinfo = "0.1"
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if request.get('hostname'):
toquery = request['hostname']
elif request.get('domain'):
toquery = request['domain']
else:
return False
r = dns.resolver.Resolver()
r.nameservers = ['8.8.8.8']
try:
answer = r.query(toquery, 'A')
except dns.resolver.NXDOMAIN:
return False
except dns.exception.Timeout:
return False
r = {'results':[{'types':mispattributes['output'], 'values':[str(answer[0])]}]}
return r
def introspection():
return mispattributes
def version():
return moduleinfo
|
Add a version per default
|
Add a version per default
|
Python
|
agpl-3.0
|
amuehlem/misp-modules,amuehlem/misp-modules,Rafiot/misp-modules,VirusTotal/misp-modules,amuehlem/misp-modules,MISP/misp-modules,VirusTotal/misp-modules,Rafiot/misp-modules,Rafiot/misp-modules,VirusTotal/misp-modules,MISP/misp-modules,MISP/misp-modules
|
python
|
## Code Before:
import json
import dns.resolver
mispattributes = {'input':['hostname', 'domain'], 'output':['ip-src', 'ip-dst']}
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if request.get('hostname'):
toquery = request['hostname']
elif request.get('domain'):
toquery = request['domain']
else:
return False
r = dns.resolver.Resolver()
r.nameservers = ['8.8.8.8']
try:
answer = r.query(toquery, 'A')
except dns.resolver.NXDOMAIN:
return False
except dns.exception.Timeout:
return False
r = {'results':[{'types':mispattributes['output'], 'values':[str(answer[0])]}]}
return r
def introspection():
return mispattributes['input']
## Instruction:
Add a version per default
## Code After:
import json
import dns.resolver
mispattributes = {'input':['hostname', 'domain'], 'output':['ip-src', 'ip-dst']}
moduleinfo = "0.1"
def handler(q=False):
if q is False:
return False
request = json.loads(q)
if request.get('hostname'):
toquery = request['hostname']
elif request.get('domain'):
toquery = request['domain']
else:
return False
r = dns.resolver.Resolver()
r.nameservers = ['8.8.8.8']
try:
answer = r.query(toquery, 'A')
except dns.resolver.NXDOMAIN:
return False
except dns.exception.Timeout:
return False
r = {'results':[{'types':mispattributes['output'], 'values':[str(answer[0])]}]}
return r
def introspection():
return mispattributes
def version():
return moduleinfo
|
// ... existing code ...
import dns.resolver
mispattributes = {'input':['hostname', 'domain'], 'output':['ip-src', 'ip-dst']}
moduleinfo = "0.1"
def handler(q=False):
if q is False:
// ... modified code ...
def introspection():
return mispattributes
def version():
return moduleinfo
// ... rest of the code ...
|
81de6e841c6775e619b94c12be49969be9d68968
|
libmypaint.c
|
libmypaint.c
|
/* Include this file in your project
* if you don't want to build libmypaint as a separate library
* Note that still need to do -I./path/to/libmypaint/sources
* for the includes here to succeed. */
#include "mapping.c"
#include "helpers.c"
#include "brushmodes.c"
#include "fifo.c"
#include "operationqueue.c"
#include "rng-double.c"
#include "utils.c"
#include "tilemap.c"
#include "mypaint.c"
#include "mypaint-brush.c"
#include "mypaint-brush-settings.c"
#include "mypaint-fixed-tiled-surface.c"
#include "mypaint-surface.c"
#include "mypaint-tiled-surface.c"
#include "mypaint-rectangle.c"
|
/* Include this file in your project
* if you don't want to build libmypaint as a separate library
* Note that still need to do -I./path/to/libmypaint/sources
* for the includes here to succeed. */
#include "helpers.c"
#include "brushmodes.c"
#include "fifo.c"
#include "operationqueue.c"
#include "rng-double.c"
#include "utils.c"
#include "tilemap.c"
#include "mypaint.c"
#include "mypaint-brush.c"
#include "mypaint-brush-settings.c"
#include "mypaint-fixed-tiled-surface.c"
#include "mypaint-surface.c"
#include "mypaint-tiled-surface.c"
#include "mypaint-rectangle.c"
#include "mypaint-mapping.c"
|
Remove a lingering ref to mapping.c
|
Remove a lingering ref to mapping.c
|
C
|
isc
|
achadwick/libmypaint,achadwick/libmypaint,achadwick/libmypaint,achadwick/libmypaint
|
c
|
## Code Before:
/* Include this file in your project
* if you don't want to build libmypaint as a separate library
* Note that still need to do -I./path/to/libmypaint/sources
* for the includes here to succeed. */
#include "mapping.c"
#include "helpers.c"
#include "brushmodes.c"
#include "fifo.c"
#include "operationqueue.c"
#include "rng-double.c"
#include "utils.c"
#include "tilemap.c"
#include "mypaint.c"
#include "mypaint-brush.c"
#include "mypaint-brush-settings.c"
#include "mypaint-fixed-tiled-surface.c"
#include "mypaint-surface.c"
#include "mypaint-tiled-surface.c"
#include "mypaint-rectangle.c"
## Instruction:
Remove a lingering ref to mapping.c
## Code After:
/* Include this file in your project
* if you don't want to build libmypaint as a separate library
* Note that still need to do -I./path/to/libmypaint/sources
* for the includes here to succeed. */
#include "helpers.c"
#include "brushmodes.c"
#include "fifo.c"
#include "operationqueue.c"
#include "rng-double.c"
#include "utils.c"
#include "tilemap.c"
#include "mypaint.c"
#include "mypaint-brush.c"
#include "mypaint-brush-settings.c"
#include "mypaint-fixed-tiled-surface.c"
#include "mypaint-surface.c"
#include "mypaint-tiled-surface.c"
#include "mypaint-rectangle.c"
#include "mypaint-mapping.c"
|
...
* Note that still need to do -I./path/to/libmypaint/sources
* for the includes here to succeed. */
#include "helpers.c"
#include "brushmodes.c"
#include "fifo.c"
...
#include "mypaint-surface.c"
#include "mypaint-tiled-surface.c"
#include "mypaint-rectangle.c"
#include "mypaint-mapping.c"
...
|
54b6fb0bf9551a55881d5d82fdcb3c6404345d63
|
src/main/java/dev/nincodedo/ninbot/components/reaction/ReactionResponse.java
|
src/main/java/dev/nincodedo/ninbot/components/reaction/ReactionResponse.java
|
package dev.nincodedo.ninbot.components.reaction;
import lombok.Data;
import lombok.NoArgsConstructor;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import java.util.List;
@NoArgsConstructor
@Data
abstract class ReactionResponse {
protected String response;
protected String target;
protected List<String> responses;
protected ReactionMatchType reactionMatchType;
public ReactionResponse(ReactionResponse reactionResponse) {
this.response = reactionResponse.response;
this.target = reactionResponse.target;
this.responses = reactionResponse.responses;
this.reactionMatchType = reactionResponse.reactionMatchType;
}
abstract void react(Message message, MessageChannel channel);
boolean canRespond(MessageReceivedEvent event) {
return switch (reactionMatchType) {
case EXACT -> getTarget()
.equalsIgnoreCase(event.getMessage().getContentStripped());
case CONTAINS -> event.getMessage()
.getContentStripped()
.toLowerCase()
.contains(getTarget()
.toLowerCase());
case REGEX -> event.getMessage().getContentStripped().matches(getTarget());
};
}
}
|
package dev.nincodedo.ninbot.components.reaction;
import lombok.Data;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import java.util.List;
@Data
class ReactionResponse {
protected String response;
protected String target;
protected List<String> responses;
protected ReactionMatchType reactionMatchType;
public ReactionResponse() {
}
public ReactionResponse(ReactionResponse reactionResponse) {
this.response = reactionResponse.response;
this.target = reactionResponse.target;
this.responses = reactionResponse.responses;
this.reactionMatchType = reactionResponse.reactionMatchType;
}
void react(Message message, MessageChannel channel) {
//NO-OP
}
boolean canRespond(MessageReceivedEvent event) {
return switch (reactionMatchType) {
case EXACT -> getTarget()
.equalsIgnoreCase(event.getMessage().getContentStripped());
case CONTAINS -> event.getMessage()
.getContentStripped()
.toLowerCase()
.contains(getTarget()
.toLowerCase());
case REGEX -> event.getMessage().getContentStripped().matches(getTarget());
};
}
}
|
Fix loading issue for reactions
|
Fix loading issue for reactions
|
Java
|
mit
|
Nincodedo/Ninbot
|
java
|
## Code Before:
package dev.nincodedo.ninbot.components.reaction;
import lombok.Data;
import lombok.NoArgsConstructor;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import java.util.List;
@NoArgsConstructor
@Data
abstract class ReactionResponse {
protected String response;
protected String target;
protected List<String> responses;
protected ReactionMatchType reactionMatchType;
public ReactionResponse(ReactionResponse reactionResponse) {
this.response = reactionResponse.response;
this.target = reactionResponse.target;
this.responses = reactionResponse.responses;
this.reactionMatchType = reactionResponse.reactionMatchType;
}
abstract void react(Message message, MessageChannel channel);
boolean canRespond(MessageReceivedEvent event) {
return switch (reactionMatchType) {
case EXACT -> getTarget()
.equalsIgnoreCase(event.getMessage().getContentStripped());
case CONTAINS -> event.getMessage()
.getContentStripped()
.toLowerCase()
.contains(getTarget()
.toLowerCase());
case REGEX -> event.getMessage().getContentStripped().matches(getTarget());
};
}
}
## Instruction:
Fix loading issue for reactions
## Code After:
package dev.nincodedo.ninbot.components.reaction;
import lombok.Data;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import java.util.List;
@Data
class ReactionResponse {
protected String response;
protected String target;
protected List<String> responses;
protected ReactionMatchType reactionMatchType;
public ReactionResponse() {
}
public ReactionResponse(ReactionResponse reactionResponse) {
this.response = reactionResponse.response;
this.target = reactionResponse.target;
this.responses = reactionResponse.responses;
this.reactionMatchType = reactionResponse.reactionMatchType;
}
void react(Message message, MessageChannel channel) {
//NO-OP
}
boolean canRespond(MessageReceivedEvent event) {
return switch (reactionMatchType) {
case EXACT -> getTarget()
.equalsIgnoreCase(event.getMessage().getContentStripped());
case CONTAINS -> event.getMessage()
.getContentStripped()
.toLowerCase()
.contains(getTarget()
.toLowerCase());
case REGEX -> event.getMessage().getContentStripped().matches(getTarget());
};
}
}
|
...
package dev.nincodedo.ninbot.components.reaction;
import lombok.Data;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
...
import java.util.List;
@Data
class ReactionResponse {
protected String response;
protected String target;
protected List<String> responses;
protected ReactionMatchType reactionMatchType;
public ReactionResponse() {
}
public ReactionResponse(ReactionResponse reactionResponse) {
this.response = reactionResponse.response;
...
this.reactionMatchType = reactionResponse.reactionMatchType;
}
void react(Message message, MessageChannel channel) {
//NO-OP
}
boolean canRespond(MessageReceivedEvent event) {
return switch (reactionMatchType) {
...
|
d49ef15aca8b9955e02b8719f238cc3a4ea26602
|
dev/__init__.py
|
dev/__init__.py
|
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "ocspbuilder"
other_packages = []
requires_oscrypto = True
has_tests_package = False
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {
'docs/api.md': ['ocspbuilder/__init__.py'],
}
definition_replacements = {}
|
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "ocspbuilder"
other_packages = []
task_keyword_args = []
requires_oscrypto = True
has_tests_package = False
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {
'docs/api.md': ['ocspbuilder/__init__.py'],
}
definition_replacements = {}
|
Add missing dev config variable
|
Add missing dev config variable
|
Python
|
mit
|
wbond/ocspbuilder
|
python
|
## Code Before:
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "ocspbuilder"
other_packages = []
requires_oscrypto = True
has_tests_package = False
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {
'docs/api.md': ['ocspbuilder/__init__.py'],
}
definition_replacements = {}
## Instruction:
Add missing dev config variable
## Code After:
from __future__ import unicode_literals, division, absolute_import, print_function
import os
package_name = "ocspbuilder"
other_packages = []
task_keyword_args = []
requires_oscrypto = True
has_tests_package = False
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
build_root = os.path.abspath(os.path.join(package_root, '..'))
md_source_map = {
'docs/api.md': ['ocspbuilder/__init__.py'],
}
definition_replacements = {}
|
# ... existing code ...
package_name = "ocspbuilder"
other_packages = []
task_keyword_args = []
requires_oscrypto = True
has_tests_package = False
# ... rest of the code ...
|
28afc9f6f81e1e7ed94e2ec561ef321bff8bb56a
|
sphinxdoc/urls.py
|
sphinxdoc/urls.py
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
Support more general documentation path names.
|
Support more general documentation path names.
|
Python
|
bsd-3-clause
|
30loops/django-sphinxdoc,kamni/django-sphinxdoc
|
python
|
## Code Before:
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
## Instruction:
Support more general documentation path names.
## Code After:
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
// ... existing code ...
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
// ... rest of the code ...
|
a1e1f0661331f5bf8faa81210eae2cad0c2ad7b3
|
calico_containers/tests/st/__init__.py
|
calico_containers/tests/st/__init__.py
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
Fix bug in file path.
|
Fix bug in file path.
|
Python
|
apache-2.0
|
dalanlan/calico-docker,projectcalico/calico-docker,L-MA/calico-docker,robbrockbank/calicoctl,insequent/calico-docker,TeaBough/calico-docker,Metaswitch/calico-docker,CiscoCloud/calico-docker,frostynova/calico-docker,L-MA/calico-docker,insequent/calico-docker,webwurst/calico-docker,Symmetric/calico-docker,TrimBiggs/calico-containers,projectcalico/calico-docker,alexhersh/calico-docker,robbrockbank/calico-docker,webwurst/calico-docker,quater/calico-containers,tomdee/calico-containers,tomdee/calico-docker,fasaxc/calicoctl,CiscoCloud/calico-docker,TeaBough/calico-docker,robbrockbank/calicoctl,robbrockbank/calico-containers,fasaxc/calicoctl,fasaxc/calico-docker,projectcalico/calico-containers,tomdee/calico-containers,robbrockbank/calico-containers,TrimBiggs/calico-docker,projectcalico/calico-containers,johscheuer/calico-docker,caseydavenport/calico-docker,caseydavenport/calico-containers,TrimBiggs/calico-docker,johscheuer/calico-docker,frostynova/calico-docker,alexhersh/calico-docker,quater/calico-containers,Symmetric/calico-docker,fasaxc/calico-docker,caseydavenport/calico-docker,caseydavenport/calico-containers,tomdee/calico-docker,Metaswitch/calico-docker,dalanlan/calico-docker,caseydavenport/calico-containers,robbrockbank/calico-docker,projectcalico/calico-containers,TrimBiggs/calico-containers
|
python
|
## Code Before:
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
## Instruction:
Fix bug in file path.
## Code After:
import os
import sh
from sh import docker
def setup_package():
"""
Sets up docker images and host containers for running the STs.
"""
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
# Create the calicoctl binary here so it will be in the volume mounted on the hosts.
print sh.bash("./create_binary.sh")
def teardown_package():
pass
|
...
# Pull and save each image, so we can use them inside the host containers.
print sh.bash("./build_node.sh").stdout
docker.save("--output", "calico_containers/calico-node.tar", "calico/node")
if not os.path.isfile("calico_containers/busybox.tar"):
docker.pull("busybox:latest")
docker.save("--output", "calico_containers/busybox.tar", "busybox:latest")
...
|
699c82a9fbff68716f34f465f0b562811a73fbb9
|
src/test/java/org/kaazing/qpid/amqp_1_0/jms/TopicProducerIT.java
|
src/test/java/org/kaazing/qpid/amqp_1_0/jms/TopicProducerIT.java
|
/*
* Copyright (c) 2007-2014, Kaazing Corporation. All rights reserved.
*/
package org.kaazing.qpid.amqp_1_0.jms;
import static javax.jms.Session.AUTO_ACKNOWLEDGE;
import org.apache.qpid.amqp_1_0.jms.Connection;
import org.apache.qpid.amqp_1_0.jms.ConnectionFactory;
import org.apache.qpid.amqp_1_0.jms.Session;
import org.apache.qpid.amqp_1_0.jms.impl.ConnectionFactoryImpl;
import org.junit.Rule;
import org.junit.Test;
import org.kaazing.robot.junit.annotation.Robotic;
import org.kaazing.robot.junit.rules.RobotRule;
public class TopicProducerIT {
@Rule
public RobotRule robot = new RobotRule().setScriptRoot("org/kaazing/robot/scripts/amqp_1_0/jms/topic/producer");
@Robotic(script = "create")
@Test(timeout = 1000)
public void shouldCreateProducer() throws Exception {
ConnectionFactory factory = new ConnectionFactoryImpl("localhost", 5672, null, null, "clientID");
Connection connection = factory.createConnection();
connection.start();
Session session = connection.createSession(false, AUTO_ACKNOWLEDGE);
session.createProducer(session.createTopic("topic-A"));
robot.join();
}
}
|
/*
* Copyright (c) 2007-2014, Kaazing Corporation. All rights reserved.
*/
package org.kaazing.qpid.amqp_1_0.jms;
import static javax.jms.Session.AUTO_ACKNOWLEDGE;
import org.apache.qpid.amqp_1_0.jms.Connection;
import org.apache.qpid.amqp_1_0.jms.ConnectionFactory;
import org.apache.qpid.amqp_1_0.jms.MessageProducer;
import org.apache.qpid.amqp_1_0.jms.Session;
import org.apache.qpid.amqp_1_0.jms.impl.ConnectionFactoryImpl;
import org.junit.Rule;
import org.junit.Test;
import org.kaazing.robot.junit.annotation.Robotic;
import org.kaazing.robot.junit.rules.RobotRule;
public class TopicProducerIT {
@Rule
public RobotRule robot = new RobotRule().setScriptRoot("org/kaazing/robot/scripts/amqp_1_0/jms/topic/producer");
@Robotic(script = "create.then.close")
@Test(timeout = 1000)
public void shouldCreateProducer() throws Exception {
ConnectionFactory factory = new ConnectionFactoryImpl("localhost", 5672, null, null, "clientID");
Connection connection = factory.createConnection();
connection.start();
Session session = connection.createSession(false, AUTO_ACKNOWLEDGE);
MessageProducer producer = session.createProducer(session.createTopic("topic://topic-A"));
producer.close();
session.close();
connection.close();
robot.join();
}
}
|
Refactor producer behavior to include close.
|
Refactor producer behavior to include close.
|
Java
|
apache-2.0
|
kaazing/qpid.jms.itest
|
java
|
## Code Before:
/*
* Copyright (c) 2007-2014, Kaazing Corporation. All rights reserved.
*/
package org.kaazing.qpid.amqp_1_0.jms;
import static javax.jms.Session.AUTO_ACKNOWLEDGE;
import org.apache.qpid.amqp_1_0.jms.Connection;
import org.apache.qpid.amqp_1_0.jms.ConnectionFactory;
import org.apache.qpid.amqp_1_0.jms.Session;
import org.apache.qpid.amqp_1_0.jms.impl.ConnectionFactoryImpl;
import org.junit.Rule;
import org.junit.Test;
import org.kaazing.robot.junit.annotation.Robotic;
import org.kaazing.robot.junit.rules.RobotRule;
public class TopicProducerIT {
@Rule
public RobotRule robot = new RobotRule().setScriptRoot("org/kaazing/robot/scripts/amqp_1_0/jms/topic/producer");
@Robotic(script = "create")
@Test(timeout = 1000)
public void shouldCreateProducer() throws Exception {
ConnectionFactory factory = new ConnectionFactoryImpl("localhost", 5672, null, null, "clientID");
Connection connection = factory.createConnection();
connection.start();
Session session = connection.createSession(false, AUTO_ACKNOWLEDGE);
session.createProducer(session.createTopic("topic-A"));
robot.join();
}
}
## Instruction:
Refactor producer behavior to include close.
## Code After:
/*
* Copyright (c) 2007-2014, Kaazing Corporation. All rights reserved.
*/
package org.kaazing.qpid.amqp_1_0.jms;
import static javax.jms.Session.AUTO_ACKNOWLEDGE;
import org.apache.qpid.amqp_1_0.jms.Connection;
import org.apache.qpid.amqp_1_0.jms.ConnectionFactory;
import org.apache.qpid.amqp_1_0.jms.MessageProducer;
import org.apache.qpid.amqp_1_0.jms.Session;
import org.apache.qpid.amqp_1_0.jms.impl.ConnectionFactoryImpl;
import org.junit.Rule;
import org.junit.Test;
import org.kaazing.robot.junit.annotation.Robotic;
import org.kaazing.robot.junit.rules.RobotRule;
public class TopicProducerIT {
@Rule
public RobotRule robot = new RobotRule().setScriptRoot("org/kaazing/robot/scripts/amqp_1_0/jms/topic/producer");
@Robotic(script = "create.then.close")
@Test(timeout = 1000)
public void shouldCreateProducer() throws Exception {
ConnectionFactory factory = new ConnectionFactoryImpl("localhost", 5672, null, null, "clientID");
Connection connection = factory.createConnection();
connection.start();
Session session = connection.createSession(false, AUTO_ACKNOWLEDGE);
MessageProducer producer = session.createProducer(session.createTopic("topic://topic-A"));
producer.close();
session.close();
connection.close();
robot.join();
}
}
|
# ... existing code ...
import org.apache.qpid.amqp_1_0.jms.Connection;
import org.apache.qpid.amqp_1_0.jms.ConnectionFactory;
import org.apache.qpid.amqp_1_0.jms.MessageProducer;
import org.apache.qpid.amqp_1_0.jms.Session;
import org.apache.qpid.amqp_1_0.jms.impl.ConnectionFactoryImpl;
import org.junit.Rule;
# ... modified code ...
@Rule
public RobotRule robot = new RobotRule().setScriptRoot("org/kaazing/robot/scripts/amqp_1_0/jms/topic/producer");
@Robotic(script = "create.then.close")
@Test(timeout = 1000)
public void shouldCreateProducer() throws Exception {
ConnectionFactory factory = new ConnectionFactoryImpl("localhost", 5672, null, null, "clientID");
...
Connection connection = factory.createConnection();
connection.start();
Session session = connection.createSession(false, AUTO_ACKNOWLEDGE);
MessageProducer producer = session.createProducer(session.createTopic("topic://topic-A"));
producer.close();
session.close();
connection.close();
robot.join();
}
}
# ... rest of the code ...
|
ff06ce55d0856cff774bdec5f0e872e093216bce
|
diffs/__init__.py
|
diffs/__init__.py
|
from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
from __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
Reorganize imports to be later
|
Reorganize imports to be later
|
Python
|
mit
|
linuxlewis/django-diffs
|
python
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from django.apps import apps as django_apps
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from .models import DiffLogEntryManager
from dirtyfields import DirtyFieldsMixin
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
## Instruction:
Reorganize imports to be later
## Code After:
from __future__ import absolute_import, unicode_literals
from .signals import connect
__version__ = '0.0.1'
default_app_config = 'diffs.apps.DiffLogConfig'
klasses_to_connect = []
def register(klass):
"""
Decorator function that registers a class to record diffs.
@register
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
klass.add_to_class('diffs', DiffLogEntryManager())
if not django_apps.ready:
klasses_to_connect.append(klass)
else:
connect(klass)
return klass
|
// ... existing code ...
from __future__ import absolute_import, unicode_literals
from .signals import connect
// ... modified code ...
class ExampleModel(models.Model):
...
"""
from django.apps import apps as django_apps
from dirtyfields import DirtyFieldsMixin
from .models import DiffLogEntryManager
# Hack to add dirtyfieldsmixin automatically
if DirtyFieldsMixin not in klass.__bases__:
klass.__bases__ = (DirtyFieldsMixin,) + klass.__bases__
// ... rest of the code ...
|
2a08a8a6d5cdac0ddcbaf34977c119c5b75bbe8d
|
wtforms_webwidgets/__init__.py
|
wtforms_webwidgets/__init__.py
|
# from .common import *
from .common import CustomWidgetMixin, custom_widget_wrapper, FieldRenderer, MultiField
|
from .common import *
|
Revert "Possible fix for docs not rendering auto"
|
Revert "Possible fix for docs not rendering auto"
This reverts commit 88c3fd3c4b23b12f4b68d3f5a13279870486d4b2.
|
Python
|
mit
|
nickw444/wtforms-webwidgets
|
python
|
## Code Before:
# from .common import *
from .common import CustomWidgetMixin, custom_widget_wrapper, FieldRenderer, MultiField
## Instruction:
Revert "Possible fix for docs not rendering auto"
This reverts commit 88c3fd3c4b23b12f4b68d3f5a13279870486d4b2.
## Code After:
from .common import *
|
# ... existing code ...
from .common import *
# ... rest of the code ...
|
b8f0029f525e36d43e3d0a27dc379d00f7d519e2
|
kotlin-eclipse-core/src/org/jetbrains/kotlin/core/Activator.java
|
kotlin-eclipse-core/src/org/jetbrains/kotlin/core/Activator.java
|
package org.jetbrains.kotlin.core;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.jetbrains.kotlin.core.builder.ResourceChangeListener;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
public class Activator implements BundleActivator {
private static BundleContext context;
private final IResourceChangeListener resourceChangeListener = new ResourceChangeListener();
public static final String PLUGIN_ID = "org.jetbrains.kotlin.core";
static BundleContext getContext() {
return context;
}
@Override
public void start(BundleContext bundleContext) throws Exception {
Activator.context = bundleContext;
getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
}
@Override
public void stop(BundleContext bundleContext) throws Exception {
Activator.context = null;
getWorkspace().removeResourceChangeListener(resourceChangeListener);
}
}
|
package org.jetbrains.kotlin.core;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.runtime.Plugin;
import org.jetbrains.kotlin.core.builder.ResourceChangeListener;
import org.osgi.framework.BundleContext;
public class Activator extends Plugin {
private static Activator plugin;
private final IResourceChangeListener resourceChangeListener = new ResourceChangeListener();
public static final String PLUGIN_ID = "org.jetbrains.kotlin.core";
public Activator() {
plugin = this;
}
public static Activator getDefault() {
return plugin;
}
@Override
public void start(BundleContext bundleContext) throws Exception {
getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
super.start(bundleContext);
}
@Override
public void stop(BundleContext bundleContext) throws Exception {
getWorkspace().removeResourceChangeListener(resourceChangeListener);
plugin = null;
}
}
|
Make core activator extendable of Plugin class
|
Make core activator extendable of Plugin class
|
Java
|
apache-2.0
|
noemus/kotlin-eclipse,noemus/kotlin-eclipse
|
java
|
## Code Before:
package org.jetbrains.kotlin.core;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.jetbrains.kotlin.core.builder.ResourceChangeListener;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
public class Activator implements BundleActivator {
private static BundleContext context;
private final IResourceChangeListener resourceChangeListener = new ResourceChangeListener();
public static final String PLUGIN_ID = "org.jetbrains.kotlin.core";
static BundleContext getContext() {
return context;
}
@Override
public void start(BundleContext bundleContext) throws Exception {
Activator.context = bundleContext;
getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
}
@Override
public void stop(BundleContext bundleContext) throws Exception {
Activator.context = null;
getWorkspace().removeResourceChangeListener(resourceChangeListener);
}
}
## Instruction:
Make core activator extendable of Plugin class
## Code After:
package org.jetbrains.kotlin.core;
import static org.eclipse.core.resources.ResourcesPlugin.getWorkspace;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.runtime.Plugin;
import org.jetbrains.kotlin.core.builder.ResourceChangeListener;
import org.osgi.framework.BundleContext;
public class Activator extends Plugin {
private static Activator plugin;
private final IResourceChangeListener resourceChangeListener = new ResourceChangeListener();
public static final String PLUGIN_ID = "org.jetbrains.kotlin.core";
public Activator() {
plugin = this;
}
public static Activator getDefault() {
return plugin;
}
@Override
public void start(BundleContext bundleContext) throws Exception {
getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
super.start(bundleContext);
}
@Override
public void stop(BundleContext bundleContext) throws Exception {
getWorkspace().removeResourceChangeListener(resourceChangeListener);
plugin = null;
}
}
|
# ... existing code ...
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.runtime.Plugin;
import org.jetbrains.kotlin.core.builder.ResourceChangeListener;
import org.osgi.framework.BundleContext;
public class Activator extends Plugin {
private static Activator plugin;
private final IResourceChangeListener resourceChangeListener = new ResourceChangeListener();
public static final String PLUGIN_ID = "org.jetbrains.kotlin.core";
public Activator() {
plugin = this;
}
public static Activator getDefault() {
return plugin;
}
@Override
public void start(BundleContext bundleContext) throws Exception {
getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
super.start(bundleContext);
}
@Override
public void stop(BundleContext bundleContext) throws Exception {
getWorkspace().removeResourceChangeListener(resourceChangeListener);
plugin = null;
}
}
# ... rest of the code ...
|
6d118fed4df334e093840d0bcaad98a06214793b
|
week1/the_real_deal/sum_matrix.py
|
week1/the_real_deal/sum_matrix.py
|
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
|
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
|
Make it look more pythonic
|
Make it look more pythonic
|
Python
|
bsd-3-clause
|
sevgo/Programming101
|
python
|
## Code Before:
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
## Instruction:
Make it look more pythonic
## Code After:
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
|
# ... existing code ...
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
# ... rest of the code ...
|
551b7124ae30b8735c4e90b2cfdcf541f5dbcfc1
|
core/src/main/java/com/dtolabs/rundeck/core/dispatcher/MutableDataContext.java
|
core/src/main/java/com/dtolabs/rundeck/core/dispatcher/MutableDataContext.java
|
package com.dtolabs.rundeck.core.dispatcher;
/**
* Created by greg on 5/26/16.
*/
public interface MutableDataContext extends DataContext, Mergable<DataContext> {
void merge(DataContext item);
}
|
package com.dtolabs.rundeck.core.dispatcher;
import java.util.HashMap;
import java.util.Map;
/**
* Created by greg on 5/26/16.
*/
public interface MutableDataContext extends DataContext, Mergable<DataContext> {
void merge(DataContext item);
/**
* Put value
*
* @param group
* @param key
* @param value
*
* @return
*/
public default String put(String group, String key, String value) {
return group(group).put(key, value);
}
default Map<String, String> group(final String group) {
if (null == get(group)) {
put(group, new HashMap<>());
}
return get(group);
}
}
|
Add utility methods for interface
|
Add utility methods for interface
|
Java
|
apache-2.0
|
rophy/rundeck,rophy/rundeck,variacode/rundeck,rundeck/rundeck,rophy/rundeck,damageboy/rundeck,rundeck/rundeck,damageboy/rundeck,variacode/rundeck,variacode/rundeck,rophy/rundeck,variacode/rundeck,damageboy/rundeck,rundeck/rundeck,damageboy/rundeck,variacode/rundeck,rundeck/rundeck,rundeck/rundeck
|
java
|
## Code Before:
package com.dtolabs.rundeck.core.dispatcher;
/**
* Created by greg on 5/26/16.
*/
public interface MutableDataContext extends DataContext, Mergable<DataContext> {
void merge(DataContext item);
}
## Instruction:
Add utility methods for interface
## Code After:
package com.dtolabs.rundeck.core.dispatcher;
import java.util.HashMap;
import java.util.Map;
/**
* Created by greg on 5/26/16.
*/
public interface MutableDataContext extends DataContext, Mergable<DataContext> {
void merge(DataContext item);
/**
* Put value
*
* @param group
* @param key
* @param value
*
* @return
*/
public default String put(String group, String key, String value) {
return group(group).put(key, value);
}
default Map<String, String> group(final String group) {
if (null == get(group)) {
put(group, new HashMap<>());
}
return get(group);
}
}
|
// ... existing code ...
package com.dtolabs.rundeck.core.dispatcher;
import java.util.HashMap;
import java.util.Map;
/**
* Created by greg on 5/26/16.
// ... modified code ...
*/
public interface MutableDataContext extends DataContext, Mergable<DataContext> {
void merge(DataContext item);
/**
* Put value
*
* @param group
* @param key
* @param value
*
* @return
*/
public default String put(String group, String key, String value) {
return group(group).put(key, value);
}
default Map<String, String> group(final String group) {
if (null == get(group)) {
put(group, new HashMap<>());
}
return get(group);
}
}
// ... rest of the code ...
|
315b581b9b0438389c7f4eb651d2893b805a2369
|
translit.py
|
translit.py
|
class Transliterator(object):
def __init__(self, mapping, invert=False):
self.mapping = [
(v, k) if invert else (k, v)
for k, v in mapping.items()
]
self._rules = sorted(
self.mapping,
key=lambda item: len(item[0]),
reverse=True,
)
@property
def rules(self):
for r in self._rules:
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
k, v = r
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
def convert(self, input_string):
"""Transliterate input string."""
for (source_char, translit_char) in self.rules:
input_string = input_string.replace(source_char, translit_char)
return input_string
|
class Transliterator(object):
def __init__(self, mapping, invert=False):
self.mapping = [
(v, k) if invert else (k, v)
for k, v in mapping.items()
]
self._rules = sorted(
self.mapping,
key=lambda item: len(item[0]),
reverse=True,
)
@property
def rules(self):
for r in self._rules:
k, v = r
if len(k) == 0:
continue # for case when char is removed and mapping inverted
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
def convert(self, input_string):
"""Transliterate input string."""
for (source_char, translit_char) in self.rules:
input_string = input_string.replace(source_char, translit_char)
return input_string
|
Handle case when char is mapped to empty (removed) and table is inverted
|
Handle case when char is mapped to empty (removed) and table is inverted
|
Python
|
mit
|
malexer/SublimeTranslit
|
python
|
## Code Before:
class Transliterator(object):
def __init__(self, mapping, invert=False):
self.mapping = [
(v, k) if invert else (k, v)
for k, v in mapping.items()
]
self._rules = sorted(
self.mapping,
key=lambda item: len(item[0]),
reverse=True,
)
@property
def rules(self):
for r in self._rules:
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
k, v = r
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
def convert(self, input_string):
"""Transliterate input string."""
for (source_char, translit_char) in self.rules:
input_string = input_string.replace(source_char, translit_char)
return input_string
## Instruction:
Handle case when char is mapped to empty (removed) and table is inverted
## Code After:
class Transliterator(object):
def __init__(self, mapping, invert=False):
self.mapping = [
(v, k) if invert else (k, v)
for k, v in mapping.items()
]
self._rules = sorted(
self.mapping,
key=lambda item: len(item[0]),
reverse=True,
)
@property
def rules(self):
for r in self._rules:
k, v = r
if len(k) == 0:
continue # for case when char is removed and mapping inverted
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
def convert(self, input_string):
"""Transliterate input string."""
for (source_char, translit_char) in self.rules:
input_string = input_string.replace(source_char, translit_char)
return input_string
|
// ... existing code ...
@property
def rules(self):
for r in self._rules:
k, v = r
if len(k) == 0:
continue # for case when char is removed and mapping inverted
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
// ... rest of the code ...
|
c5c2d3c411ba38a7b110044e04657ae6584be861
|
scripts/helpers.py
|
scripts/helpers.py
|
def printSnapshot(doc):
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryRequests(db):
requests_ref = db.collection(u'requests')
docs = requests_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
return user
return None
|
def printSnapshot(doc):
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryRequests(db):
requests_ref = db.collection(u'requests')
docs = requests_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryMessages(db):
messages_ref = db.collection(u'messages')
docs = messages_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
return user
return None
|
Add script to clean the message table
|
Add script to clean the message table
|
Python
|
mit
|
frinder/frinder-app,frinder/frinder-app,frinder/frinder-app
|
python
|
## Code Before:
def printSnapshot(doc):
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryRequests(db):
requests_ref = db.collection(u'requests')
docs = requests_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
return user
return None
## Instruction:
Add script to clean the message table
## Code After:
def printSnapshot(doc):
print(u'Created {} => {}'.format(doc.id, doc.to_dict()))
def queryUsers(db):
users_ref = db.collection(u'users')
docs = users_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryRequests(db):
requests_ref = db.collection(u'requests')
docs = requests_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def queryMessages(db):
messages_ref = db.collection(u'messages')
docs = messages_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
return user
return None
|
# ... existing code ...
docList.append(doc)
return docList
def queryMessages(db):
messages_ref = db.collection(u'messages')
docs = messages_ref.get()
docList = list()
for doc in docs:
docList.append(doc)
return docList
def getUser(userId, users):
for user in users:
if user.id == userId:
# ... rest of the code ...
|
72af62bdf9339c880b0cc0f1e1002cf1961e962b
|
rule.py
|
rule.py
|
class PriceRule:
"""PriceRule is a rule that triggers when a stock price satisfies a condition.
The condition is usually greater, equal or lesser than a given value.
"""
def __init__(self, symbol, condition):
self.symbol = symbol
self.condition = condition
def matches(self, exchange):
try:
stock = exchange[self.symbol]
except KeyError:
return False
return self.condition(stock) if stock.price else False
def depends_on(self):
return {self.symbol}
class AndRule:
def __init__(self, rule1, rule2):
self.rule1 = rule1
self.rule2 = rule2
|
class PriceRule:
"""PriceRule is a rule that triggers when a stock price satisfies a condition.
The condition is usually greater, equal or lesser than a given value.
"""
def __init__(self, symbol, condition):
self.symbol = symbol
self.condition = condition
def matches(self, exchange):
try:
stock = exchange[self.symbol]
except KeyError:
return False
return self.condition(stock) if stock.price else False
def depends_on(self):
return {self.symbol}
class AndRule:
def __init__(self, rule1, rule2):
self.rule1 = rule1
self.rule2 = rule2
def matches(self, exchange):
matches_bool = self.rule1.matches(exchange) and self.rule2.matches(exchange)
return matches_bool
|
Add matches method to AndRule class.
|
Add matches method to AndRule class.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
python
|
## Code Before:
class PriceRule:
"""PriceRule is a rule that triggers when a stock price satisfies a condition.
The condition is usually greater, equal or lesser than a given value.
"""
def __init__(self, symbol, condition):
self.symbol = symbol
self.condition = condition
def matches(self, exchange):
try:
stock = exchange[self.symbol]
except KeyError:
return False
return self.condition(stock) if stock.price else False
def depends_on(self):
return {self.symbol}
class AndRule:
def __init__(self, rule1, rule2):
self.rule1 = rule1
self.rule2 = rule2
## Instruction:
Add matches method to AndRule class.
## Code After:
class PriceRule:
"""PriceRule is a rule that triggers when a stock price satisfies a condition.
The condition is usually greater, equal or lesser than a given value.
"""
def __init__(self, symbol, condition):
self.symbol = symbol
self.condition = condition
def matches(self, exchange):
try:
stock = exchange[self.symbol]
except KeyError:
return False
return self.condition(stock) if stock.price else False
def depends_on(self):
return {self.symbol}
class AndRule:
def __init__(self, rule1, rule2):
self.rule1 = rule1
self.rule2 = rule2
def matches(self, exchange):
matches_bool = self.rule1.matches(exchange) and self.rule2.matches(exchange)
return matches_bool
|
# ... existing code ...
def __init__(self, rule1, rule2):
self.rule1 = rule1
self.rule2 = rule2
def matches(self, exchange):
matches_bool = self.rule1.matches(exchange) and self.rule2.matches(exchange)
return matches_bool
# ... rest of the code ...
|
f268c54d0f0b4bd9cc3367f997e89d37ce876e83
|
src/main/java/com/hubspot/dropwizard/guice/DropwizardEnvironmentModule.java
|
src/main/java/com/hubspot/dropwizard/guice/DropwizardEnvironmentModule.java
|
package com.hubspot.dropwizard.guice;
import com.google.common.base.Optional;
import com.google.inject.AbstractModule;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.yammer.dropwizard.config.Configuration;
import com.yammer.dropwizard.config.Environment;
public class DropwizardEnvironmentModule<T extends Configuration> extends AbstractModule {
private T configuration;
private Environment environment;
private Class<? super T> configurationClass;
public DropwizardEnvironmentModule(Class<T> configurationClass) {
this.configurationClass = configurationClass;
}
@Override
protected void configure() {
Provider<T> provider = new CustomConfigurationProvider();
bind(configurationClass).toProvider(provider);
if (configurationClass != Configuration.class) {
bind(Configuration.class).toProvider(provider);
}
}
public void setEnvironmentData(T configuration, Environment environment) {
this.configuration = configuration;
this.environment = environment;
}
@Provides
public Environment providesEnvironment() {
return environment;
}
private class CustomConfigurationProvider implements Provider<T> {
@Override
public T get() {
return configuration;
}
}
}
|
package com.hubspot.dropwizard.guice;
import com.google.common.base.Optional;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.google.inject.spi.Message;
import com.yammer.dropwizard.config.Configuration;
import com.yammer.dropwizard.config.Environment;
import java.util.Arrays;
public class DropwizardEnvironmentModule<T extends Configuration> extends AbstractModule {
private T configuration;
private Environment environment;
private Class<? super T> configurationClass;
public DropwizardEnvironmentModule(Class<T> configurationClass) {
this.configurationClass = configurationClass;
}
@Override
protected void configure() {
Provider<T> provider = new CustomConfigurationProvider();
bind(configurationClass).toProvider(provider);
if (configurationClass != Configuration.class) {
bind(Configuration.class).toProvider(provider);
}
}
public void setEnvironmentData(T configuration, Environment environment) {
this.configuration = configuration;
this.environment = environment;
}
@Provides
public Environment providesEnvironment() {
if (environment == null) {
throw new CreationException(Arrays.asList(new Message("The dropwizard environment has not yet been set. This is likely caused by trying to access the dropwizard environment during the bootstrap phase.")));
}
return environment;
}
private class CustomConfigurationProvider implements Provider<T> {
@Override
public T get() {
if (configuration == null) {
throw new CreationException(Arrays.asList(new Message("The dropwizard configuration has not yet been set. This is likely caused by trying to access the dropwizard environment during the bootstrap phase.")));
}
return configuration;
}
}
}
|
Throw exceptions if accessing configuration/environment during bootstrap phase.
|
Throw exceptions if accessing configuration/environment during bootstrap phase.
|
Java
|
apache-2.0
|
HubSpot/dropwizard-guice,ingenieux/dropwizard-governator,puneetjaiswal/dropwizard-guice,oillio/dropwizard-guice,mwei1us/dropwizard-guice,alexan/dropwizard-guice,thomas-p-wilson/dropwizard-guice
|
java
|
## Code Before:
package com.hubspot.dropwizard.guice;
import com.google.common.base.Optional;
import com.google.inject.AbstractModule;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.yammer.dropwizard.config.Configuration;
import com.yammer.dropwizard.config.Environment;
public class DropwizardEnvironmentModule<T extends Configuration> extends AbstractModule {
private T configuration;
private Environment environment;
private Class<? super T> configurationClass;
public DropwizardEnvironmentModule(Class<T> configurationClass) {
this.configurationClass = configurationClass;
}
@Override
protected void configure() {
Provider<T> provider = new CustomConfigurationProvider();
bind(configurationClass).toProvider(provider);
if (configurationClass != Configuration.class) {
bind(Configuration.class).toProvider(provider);
}
}
public void setEnvironmentData(T configuration, Environment environment) {
this.configuration = configuration;
this.environment = environment;
}
@Provides
public Environment providesEnvironment() {
return environment;
}
private class CustomConfigurationProvider implements Provider<T> {
@Override
public T get() {
return configuration;
}
}
}
## Instruction:
Throw exceptions if accessing configuration/environment during bootstrap phase.
## Code After:
package com.hubspot.dropwizard.guice;
import com.google.common.base.Optional;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.google.inject.spi.Message;
import com.yammer.dropwizard.config.Configuration;
import com.yammer.dropwizard.config.Environment;
import java.util.Arrays;
public class DropwizardEnvironmentModule<T extends Configuration> extends AbstractModule {
private T configuration;
private Environment environment;
private Class<? super T> configurationClass;
public DropwizardEnvironmentModule(Class<T> configurationClass) {
this.configurationClass = configurationClass;
}
@Override
protected void configure() {
Provider<T> provider = new CustomConfigurationProvider();
bind(configurationClass).toProvider(provider);
if (configurationClass != Configuration.class) {
bind(Configuration.class).toProvider(provider);
}
}
public void setEnvironmentData(T configuration, Environment environment) {
this.configuration = configuration;
this.environment = environment;
}
@Provides
public Environment providesEnvironment() {
if (environment == null) {
throw new CreationException(Arrays.asList(new Message("The dropwizard environment has not yet been set. This is likely caused by trying to access the dropwizard environment during the bootstrap phase.")));
}
return environment;
}
private class CustomConfigurationProvider implements Provider<T> {
@Override
public T get() {
if (configuration == null) {
throw new CreationException(Arrays.asList(new Message("The dropwizard configuration has not yet been set. This is likely caused by trying to access the dropwizard environment during the bootstrap phase.")));
}
return configuration;
}
}
}
|
...
import com.google.common.base.Optional;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.google.inject.spi.Message;
import com.yammer.dropwizard.config.Configuration;
import com.yammer.dropwizard.config.Environment;
import java.util.Arrays;
public class DropwizardEnvironmentModule<T extends Configuration> extends AbstractModule {
private T configuration;
...
@Provides
public Environment providesEnvironment() {
if (environment == null) {
throw new CreationException(Arrays.asList(new Message("The dropwizard environment has not yet been set. This is likely caused by trying to access the dropwizard environment during the bootstrap phase.")));
}
return environment;
}
...
private class CustomConfigurationProvider implements Provider<T> {
@Override
public T get() {
if (configuration == null) {
throw new CreationException(Arrays.asList(new Message("The dropwizard configuration has not yet been set. This is likely caused by trying to access the dropwizard environment during the bootstrap phase.")));
}
return configuration;
}
}
...
|
805486628f02a531370540b6443d9547f7f56c68
|
ui-tests/src/test/java/io/syndesis/qe/CucumberTest.java
|
ui-tests/src/test/java/io/syndesis/qe/CucumberTest.java
|
package io.syndesis.qe;
import com.codeborne.selenide.Configuration;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
@RunWith(Cucumber.class)
@CucumberOptions(
features = "classpath:features",
tags = "not @wip, @manual, @deprecated",
format = {"pretty", "html:target/cucumber/cucumber-html", "junit:target/cucumber/cucumber-junit.xml", "json:target/cucumber/cucumber-report.json"}
)
public class CucumberTest extends TestSuiteParent {
@BeforeClass
public static void setupCucumber() {
//set up Selenide
Configuration.timeout = TestConfiguration.getConfigTimeout() * 1000;
//We will now use custom web driver
//Configuration.browser = TestConfiguration.syndesisBrowser();
Configuration.browser = "io.syndesis.qe.CustomWebDriverProvider";
Configuration.browserSize= "1920x1080";
}
}
|
package io.syndesis.qe;
import com.codeborne.selenide.Configuration;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
@RunWith(Cucumber.class)
@CucumberOptions(
features = "classpath:features",
tags = {"not @wip", "not @manual", "not @deprecated"},
format = {"pretty", "html:target/cucumber/cucumber-html", "junit:target/cucumber/cucumber-junit.xml", "json:target/cucumber/cucumber-report.json"}
)
public class CucumberTest extends TestSuiteParent {
@BeforeClass
public static void setupCucumber() {
//set up Selenide
Configuration.timeout = TestConfiguration.getConfigTimeout() * 1000;
//We will now use custom web driver
//Configuration.browser = TestConfiguration.syndesisBrowser();
Configuration.browser = "io.syndesis.qe.CustomWebDriverProvider";
Configuration.browserSize= "1920x1080";
}
}
|
Update cucumber options to new format
|
[ui][fix] Update cucumber options to new format
|
Java
|
apache-2.0
|
mcada/syndesis-qe,mcada/syndesis-qe,mcada/syndesis-qe
|
java
|
## Code Before:
package io.syndesis.qe;
import com.codeborne.selenide.Configuration;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
@RunWith(Cucumber.class)
@CucumberOptions(
features = "classpath:features",
tags = "not @wip, @manual, @deprecated",
format = {"pretty", "html:target/cucumber/cucumber-html", "junit:target/cucumber/cucumber-junit.xml", "json:target/cucumber/cucumber-report.json"}
)
public class CucumberTest extends TestSuiteParent {
@BeforeClass
public static void setupCucumber() {
//set up Selenide
Configuration.timeout = TestConfiguration.getConfigTimeout() * 1000;
//We will now use custom web driver
//Configuration.browser = TestConfiguration.syndesisBrowser();
Configuration.browser = "io.syndesis.qe.CustomWebDriverProvider";
Configuration.browserSize= "1920x1080";
}
}
## Instruction:
[ui][fix] Update cucumber options to new format
## Code After:
package io.syndesis.qe;
import com.codeborne.selenide.Configuration;
import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
@RunWith(Cucumber.class)
@CucumberOptions(
features = "classpath:features",
tags = {"not @wip", "not @manual", "not @deprecated"},
format = {"pretty", "html:target/cucumber/cucumber-html", "junit:target/cucumber/cucumber-junit.xml", "json:target/cucumber/cucumber-report.json"}
)
public class CucumberTest extends TestSuiteParent {
@BeforeClass
public static void setupCucumber() {
//set up Selenide
Configuration.timeout = TestConfiguration.getConfigTimeout() * 1000;
//We will now use custom web driver
//Configuration.browser = TestConfiguration.syndesisBrowser();
Configuration.browser = "io.syndesis.qe.CustomWebDriverProvider";
Configuration.browserSize= "1920x1080";
}
}
|
...
@RunWith(Cucumber.class)
@CucumberOptions(
features = "classpath:features",
tags = {"not @wip", "not @manual", "not @deprecated"},
format = {"pretty", "html:target/cucumber/cucumber-html", "junit:target/cucumber/cucumber-junit.xml", "json:target/cucumber/cucumber-report.json"}
)
public class CucumberTest extends TestSuiteParent {
...
|
7e2565007c926765750641b048607ed29b8aada0
|
cmsplugin_zinnia/admin.py
|
cmsplugin_zinnia/admin.py
|
"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
|
"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
|
Add comment about why excepting KeyError
|
Add comment about why excepting KeyError
|
Python
|
bsd-3-clause
|
bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia
|
python
|
## Code Before:
"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
## Instruction:
Add comment about why excepting KeyError
## Code After:
"""Admin of Zinnia CMS Plugins"""
from django.contrib import admin
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from cms.plugin_rendering import render_placeholder
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from zinnia.models import Entry
from zinnia.admin.entry import EntryAdmin
from zinnia.settings import ENTRY_BASE_MODEL
class EntryPlaceholderAdmin(PlaceholderAdminMixin, EntryAdmin):
"""
EntryPlaceholder Admin
"""
fieldsets = (
(_('Content'), {'fields': (('title', 'status'), 'image')}),) + \
EntryAdmin.fieldsets[1:]
def save_model(self, request, entry, form, change):
"""
Fill the content field with the interpretation
of the placeholder
"""
context = RequestContext(request)
try:
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
if ENTRY_BASE_MODEL == 'cmsplugin_zinnia.placeholder.EntryPlaceholder':
admin.site.register(Entry, EntryPlaceholderAdmin)
|
// ... existing code ...
content = render_placeholder(entry.content_placeholder, context)
entry.content = content or ''
except KeyError:
# https://github.com/django-blog-zinnia/cmsplugin-zinnia/pull/61
entry.content = ''
super(EntryPlaceholderAdmin, self).save_model(
request, entry, form, change)
// ... rest of the code ...
|
6cf8bad4faa15bcbc149db678e2ec232ce82b72a
|
utils/efushell/SocketDriver.py
|
utils/efushell/SocketDriver.py
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
Move string formatting onto two lines for readability
|
Move string formatting onto two lines for readability
|
Python
|
bsd-2-clause
|
ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit
|
python
|
## Code Before:
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
## Instruction:
Move string formatting onto two lines for readability
## Code After:
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
|
# ... existing code ...
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
# ... rest of the code ...
|
a29a36a457b30ee4599ff51c9715ac4067389be0
|
webservice/src/main/java/org/openhds/webservice/CacheResponseWriter.java
|
webservice/src/main/java/org/openhds/webservice/CacheResponseWriter.java
|
package org.openhds.webservice;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
public class CacheResponseWriter {
public static void writeResponse(File fileToWrite, HttpServletResponse response) throws IOException {
if (!fileToWrite.exists()) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
response.setStatus(HttpServletResponse.SC_OK);
InputStream is = null;
try {
is = new BufferedInputStream(new FileInputStream(fileToWrite));
IOUtils.copy(is, response.getOutputStream());
} finally {
if (is != null) {
IOUtils.closeQuietly(is);
}
}
}
}
|
package org.openhds.webservice;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
public class CacheResponseWriter {
public static void writeResponse(File fileToWrite, HttpServletResponse response) throws IOException {
if (!fileToWrite.exists()) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
else{
response.setStatus(HttpServletResponse.SC_OK);
InputStream is = null;
try {
is = new BufferedInputStream(new FileInputStream(fileToWrite));
IOUtils.copy(is, response.getOutputStream());
} finally {
if (is != null) {
IOUtils.closeQuietly(is);
}
}
}
}
}
|
Write Http-ErrorCode 404 if cached xml file for task does not exist
|
Write Http-ErrorCode 404 if cached xml file for task does not exist
|
Java
|
bsd-3-clause
|
SwissTPH/openhds-server,SwissTPH/openhds-server,SwissTPH/openhds-server
|
java
|
## Code Before:
package org.openhds.webservice;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
public class CacheResponseWriter {
public static void writeResponse(File fileToWrite, HttpServletResponse response) throws IOException {
if (!fileToWrite.exists()) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
response.setStatus(HttpServletResponse.SC_OK);
InputStream is = null;
try {
is = new BufferedInputStream(new FileInputStream(fileToWrite));
IOUtils.copy(is, response.getOutputStream());
} finally {
if (is != null) {
IOUtils.closeQuietly(is);
}
}
}
}
## Instruction:
Write Http-ErrorCode 404 if cached xml file for task does not exist
## Code After:
package org.openhds.webservice;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
public class CacheResponseWriter {
public static void writeResponse(File fileToWrite, HttpServletResponse response) throws IOException {
if (!fileToWrite.exists()) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
else{
response.setStatus(HttpServletResponse.SC_OK);
InputStream is = null;
try {
is = new BufferedInputStream(new FileInputStream(fileToWrite));
IOUtils.copy(is, response.getOutputStream());
} finally {
if (is != null) {
IOUtils.closeQuietly(is);
}
}
}
}
}
|
// ... existing code ...
if (!fileToWrite.exists()) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
else{
response.setStatus(HttpServletResponse.SC_OK);
InputStream is = null;
try {
is = new BufferedInputStream(new FileInputStream(fileToWrite));
IOUtils.copy(is, response.getOutputStream());
} finally {
if (is != null) {
IOUtils.closeQuietly(is);
}
}
}
}
}
// ... rest of the code ...
|
08e84dcc0bce7a1914bc7fa734ca51c0dde362d1
|
lab/monitors/nova_service_list.py
|
lab/monitors/nova_service_list.py
|
def start(lab, log, args):
import time
from fabric.context_managers import shell_env
grep_host = args.get('grep_host', 'overcloud-')
duration = args['duration']
period = args['period']
statuses = {'up': 1, 'down': 0}
server = lab.director()
start_time = time.time()
while start_time + duration > time.time():
with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
results = [line.split() for line in res.split('\n')]
msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
log.info('{1}'.format(grep_host, msg))
time.sleep(period)
|
def start(lab, log, args):
from fabric.context_managers import shell_env
grep_host = args.get('grep_host', 'overcloud-')
statuses = {'up': 1, 'down': 0}
server = lab.director()
with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
results = [line.split() for line in res.split('\n')]
msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
log.info('{1}'.format(grep_host, msg))
|
Verify services status if FI is rebooted
|
Verify services status if FI is rebooted
Change-Id: Ia02ef16d53fbb7b55a8de884ff16a4bef345a1f2
|
Python
|
apache-2.0
|
CiscoSystems/os-sqe,CiscoSystems/os-sqe,CiscoSystems/os-sqe
|
python
|
## Code Before:
def start(lab, log, args):
import time
from fabric.context_managers import shell_env
grep_host = args.get('grep_host', 'overcloud-')
duration = args['duration']
period = args['period']
statuses = {'up': 1, 'down': 0}
server = lab.director()
start_time = time.time()
while start_time + duration > time.time():
with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
results = [line.split() for line in res.split('\n')]
msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
log.info('{1}'.format(grep_host, msg))
time.sleep(period)
## Instruction:
Verify services status if FI is rebooted
Change-Id: Ia02ef16d53fbb7b55a8de884ff16a4bef345a1f2
## Code After:
def start(lab, log, args):
from fabric.context_managers import shell_env
grep_host = args.get('grep_host', 'overcloud-')
statuses = {'up': 1, 'down': 0}
server = lab.director()
with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
results = [line.split() for line in res.split('\n')]
msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
log.info('{1}'.format(grep_host, msg))
|
...
def start(lab, log, args):
from fabric.context_managers import shell_env
grep_host = args.get('grep_host', 'overcloud-')
statuses = {'up': 1, 'down': 0}
server = lab.director()
with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
results = [line.split() for line in res.split('\n')]
msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
log.info('{1}'.format(grep_host, msg))
...
|
855f14a819774f5417417e7a14938036c4115833
|
src/server.h
|
src/server.h
|
const int XAPIAND_HTTP_PORT_DEFAULT = 8880;
const int XAPIAND_BINARY_PORT_DEFAULT = 8890;
class XapiandServer : public Task {
private:
ev::dynamic_loop loop;
ev::sig sig;
ev::async quit;
ev::io http_io;
int http_sock;
ev::io binary_io;
int binary_sock;
DatabasePool database_pool;
void bind_http();
void bind_binary();
void io_accept_http(ev::io &watcher, int revents);
void io_accept_binary(ev::io &watcher, int revents);
void signal_cb(ev::sig &signal, int revents);
void quit_cb(ev::async &watcher, int revents);
public:
XapiandServer(int http_sock_, int binary_sock_);
~XapiandServer();
void run();
};
#endif /* XAPIAND_INCLUDED_SERVER_H */
|
const int XAPIAND_HTTP_PORT_DEFAULT = 8880;
const int XAPIAND_BINARY_PORT_DEFAULT = 8890;
class XapiandServer : public Task {
private:
ev::dynamic_loop dynamic_loop;
ev::loop_ref *loop;
ev::sig sig;
ev::async quit;
ev::io http_io;
int http_sock;
ev::io binary_io;
int binary_sock;
DatabasePool database_pool;
void bind_http();
void bind_binary();
void io_accept_http(ev::io &watcher, int revents);
void io_accept_binary(ev::io &watcher, int revents);
void signal_cb(ev::sig &signal, int revents);
void quit_cb(ev::async &watcher, int revents);
public:
XapiandServer(int http_sock_, int binary_sock_, ev::loop_ref *loop_=NULL);
~XapiandServer();
void run();
};
#endif /* XAPIAND_INCLUDED_SERVER_H */
|
Allow passing an event loop to XapianServer
|
Allow passing an event loop to XapianServer
|
C
|
mit
|
Kronuz/Xapiand,Kronuz/Xapiand,Kronuz/Xapiand,Kronuz/Xapiand,Kronuz/Xapiand,Kronuz/Xapiand
|
c
|
## Code Before:
const int XAPIAND_HTTP_PORT_DEFAULT = 8880;
const int XAPIAND_BINARY_PORT_DEFAULT = 8890;
class XapiandServer : public Task {
private:
ev::dynamic_loop loop;
ev::sig sig;
ev::async quit;
ev::io http_io;
int http_sock;
ev::io binary_io;
int binary_sock;
DatabasePool database_pool;
void bind_http();
void bind_binary();
void io_accept_http(ev::io &watcher, int revents);
void io_accept_binary(ev::io &watcher, int revents);
void signal_cb(ev::sig &signal, int revents);
void quit_cb(ev::async &watcher, int revents);
public:
XapiandServer(int http_sock_, int binary_sock_);
~XapiandServer();
void run();
};
#endif /* XAPIAND_INCLUDED_SERVER_H */
## Instruction:
Allow passing an event loop to XapianServer
## Code After:
const int XAPIAND_HTTP_PORT_DEFAULT = 8880;
const int XAPIAND_BINARY_PORT_DEFAULT = 8890;
class XapiandServer : public Task {
private:
ev::dynamic_loop dynamic_loop;
ev::loop_ref *loop;
ev::sig sig;
ev::async quit;
ev::io http_io;
int http_sock;
ev::io binary_io;
int binary_sock;
DatabasePool database_pool;
void bind_http();
void bind_binary();
void io_accept_http(ev::io &watcher, int revents);
void io_accept_binary(ev::io &watcher, int revents);
void signal_cb(ev::sig &signal, int revents);
void quit_cb(ev::async &watcher, int revents);
public:
XapiandServer(int http_sock_, int binary_sock_, ev::loop_ref *loop_=NULL);
~XapiandServer();
void run();
};
#endif /* XAPIAND_INCLUDED_SERVER_H */
|
// ... existing code ...
class XapiandServer : public Task {
private:
ev::dynamic_loop dynamic_loop;
ev::loop_ref *loop;
ev::sig sig;
ev::async quit;
// ... modified code ...
void quit_cb(ev::async &watcher, int revents);
public:
XapiandServer(int http_sock_, int binary_sock_, ev::loop_ref *loop_=NULL);
~XapiandServer();
void run();
// ... rest of the code ...
|
bbab158f09e5fd125ec2558bfa319f78e96e13ac
|
a_filter.h
|
a_filter.h
|
namespace a {
struct source;
struct filterInstance {
virtual void filter(float *buffer, size_t samples, bool strero, float sampleRate) = 0;
virtual ~filterInstance();
};
struct filter {
virtual void init(source *);
virtual filterInstance *create() = 0;
virtual ~filter();
};
struct echoFilter;
struct echoFilterInstance : filterInstance {
virtual void filter(float *buffer, size_t samples, bool stereo, float sampleRate);
virtual ~echoFilterInstance();
echoFilterInstance(echoFilter *parent);
private:
u::vector<float> m_buffer;
echoFilter *m_parent;
size_t m_offset;
};
struct echoFilter : filter {
virtual void init(source *sourcer_);
virtual filterInstance *create();
echoFilter();
void setParams(float delay, float decay);
private:
friend struct echoFilterInstance;
float m_delay;
float m_decay;
};
}
#endif
|
namespace a {
struct source;
struct filterInstance {
virtual void filter(float *buffer, size_t samples, bool strero, float sampleRate) = 0;
virtual ~filterInstance();
};
struct filter {
virtual void init(source *audioSource);
virtual filterInstance *create() = 0;
virtual ~filter();
};
struct echoFilter;
struct echoFilterInstance : filterInstance {
virtual void filter(float *buffer, size_t samples, bool stereo, float sampleRate);
virtual ~echoFilterInstance();
echoFilterInstance(echoFilter *parent);
private:
u::vector<float> m_buffer;
echoFilter *m_parent;
size_t m_offset;
};
struct echoFilter : filter {
virtual void init(source *sourcer_);
virtual filterInstance *create();
echoFilter();
void setParams(float delay, float decay);
private:
friend struct echoFilterInstance;
float m_delay;
float m_decay;
};
}
#endif
|
Revert "Testing GPG key on github"
|
Revert "Testing GPG key on github"
This reverts commit 22b0c196d0aa47ffbd016da66e98207b80bcdc82.
|
C
|
mit
|
graphitemaster/neothyne,graphitemaster/neothyne
|
c
|
## Code Before:
namespace a {
struct source;
struct filterInstance {
virtual void filter(float *buffer, size_t samples, bool strero, float sampleRate) = 0;
virtual ~filterInstance();
};
struct filter {
virtual void init(source *);
virtual filterInstance *create() = 0;
virtual ~filter();
};
struct echoFilter;
struct echoFilterInstance : filterInstance {
virtual void filter(float *buffer, size_t samples, bool stereo, float sampleRate);
virtual ~echoFilterInstance();
echoFilterInstance(echoFilter *parent);
private:
u::vector<float> m_buffer;
echoFilter *m_parent;
size_t m_offset;
};
struct echoFilter : filter {
virtual void init(source *sourcer_);
virtual filterInstance *create();
echoFilter();
void setParams(float delay, float decay);
private:
friend struct echoFilterInstance;
float m_delay;
float m_decay;
};
}
#endif
## Instruction:
Revert "Testing GPG key on github"
This reverts commit 22b0c196d0aa47ffbd016da66e98207b80bcdc82.
## Code After:
namespace a {
struct source;
struct filterInstance {
virtual void filter(float *buffer, size_t samples, bool strero, float sampleRate) = 0;
virtual ~filterInstance();
};
struct filter {
virtual void init(source *audioSource);
virtual filterInstance *create() = 0;
virtual ~filter();
};
struct echoFilter;
struct echoFilterInstance : filterInstance {
virtual void filter(float *buffer, size_t samples, bool stereo, float sampleRate);
virtual ~echoFilterInstance();
echoFilterInstance(echoFilter *parent);
private:
u::vector<float> m_buffer;
echoFilter *m_parent;
size_t m_offset;
};
struct echoFilter : filter {
virtual void init(source *sourcer_);
virtual filterInstance *create();
echoFilter();
void setParams(float delay, float decay);
private:
friend struct echoFilterInstance;
float m_delay;
float m_decay;
};
}
#endif
|
# ... existing code ...
};
struct filter {
virtual void init(source *audioSource);
virtual filterInstance *create() = 0;
virtual ~filter();
};
# ... rest of the code ...
|
c436708c65be210d2ab761a02df2b6c06bd0a85b
|
test/Lexer/block_cmt_end.c
|
test/Lexer/block_cmt_end.c
|
/*
RUN: %clang_cc1 -E -trigraphs %s | grep bar
RUN: %clang_cc1 -E -trigraphs %s | grep foo
RUN: %clang_cc1 -E -trigraphs %s | not grep abc
RUN: %clang_cc1 -E -trigraphs %s | not grep xyz
RUN: %clang_cc1 -fsyntax-only -trigraphs -verify %s
*/
// This is a simple comment, /*/ does not end a comment, the trailing */ does.
int i = /*/ */ 1;
/* abc
next comment ends with normal escaped newline:
*/
/* expected-warning {{escaped newline}} expected-warning {{backslash and newline}} *\
/
int bar /* expected-error {{expected ';' after top level declarator}} */
/* xyz
next comment ends with a trigraph escaped newline: */
/* expected-warning {{escaped newline between}} expected-warning {{backslash and newline separated by space}} expected-warning {{trigraph ends block comment}} *??/
/
foo
// rdar://6060752 - We should not get warnings about trigraphs in comments:
// '????'
/* ???? */
|
/*
RUN: %clang_cc1 -E -trigraphs %s | grep bar
RUN: %clang_cc1 -E -trigraphs %s | grep foo
RUN: %clang_cc1 -E -trigraphs %s | not grep qux
RUN: %clang_cc1 -E -trigraphs %s | not grep xyz
RUN: %clang_cc1 -fsyntax-only -trigraphs -verify %s
*/
// This is a simple comment, /*/ does not end a comment, the trailing */ does.
int i = /*/ */ 1;
/* qux
next comment ends with normal escaped newline:
*/
/* expected-warning {{escaped newline}} expected-warning {{backslash and newline}} *\
/
int bar /* expected-error {{expected ';' after top level declarator}} */
/* xyz
next comment ends with a trigraph escaped newline: */
/* expected-warning {{escaped newline between}} expected-warning {{backslash and newline separated by space}} expected-warning {{trigraph ends block comment}} *??/
/
foo
// rdar://6060752 - We should not get warnings about trigraphs in comments:
// '????'
/* ???? */
|
Change magic string "abc" to better magic string "qux".
|
Change magic string "abc" to better magic string "qux".
Wait, what?
So, we run Clang (and LLVM) tests in an environment where the md5sum of the
input files becomes a component of the path. When testing the preprocessor,
the path becomes part of the output (in line directives). In this test, we
were grepping for the absence of "abc" in the output. When the stars aligned
properly, the md5sum component of the path contained "abc" and the test
failed. Oops.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@131147 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang
|
c
|
## Code Before:
/*
RUN: %clang_cc1 -E -trigraphs %s | grep bar
RUN: %clang_cc1 -E -trigraphs %s | grep foo
RUN: %clang_cc1 -E -trigraphs %s | not grep abc
RUN: %clang_cc1 -E -trigraphs %s | not grep xyz
RUN: %clang_cc1 -fsyntax-only -trigraphs -verify %s
*/
// This is a simple comment, /*/ does not end a comment, the trailing */ does.
int i = /*/ */ 1;
/* abc
next comment ends with normal escaped newline:
*/
/* expected-warning {{escaped newline}} expected-warning {{backslash and newline}} *\
/
int bar /* expected-error {{expected ';' after top level declarator}} */
/* xyz
next comment ends with a trigraph escaped newline: */
/* expected-warning {{escaped newline between}} expected-warning {{backslash and newline separated by space}} expected-warning {{trigraph ends block comment}} *??/
/
foo
// rdar://6060752 - We should not get warnings about trigraphs in comments:
// '????'
/* ???? */
## Instruction:
Change magic string "abc" to better magic string "qux".
Wait, what?
So, we run Clang (and LLVM) tests in an environment where the md5sum of the
input files becomes a component of the path. When testing the preprocessor,
the path becomes part of the output (in line directives). In this test, we
were grepping for the absence of "abc" in the output. When the stars aligned
properly, the md5sum component of the path contained "abc" and the test
failed. Oops.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@131147 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
/*
RUN: %clang_cc1 -E -trigraphs %s | grep bar
RUN: %clang_cc1 -E -trigraphs %s | grep foo
RUN: %clang_cc1 -E -trigraphs %s | not grep qux
RUN: %clang_cc1 -E -trigraphs %s | not grep xyz
RUN: %clang_cc1 -fsyntax-only -trigraphs -verify %s
*/
// This is a simple comment, /*/ does not end a comment, the trailing */ does.
int i = /*/ */ 1;
/* qux
next comment ends with normal escaped newline:
*/
/* expected-warning {{escaped newline}} expected-warning {{backslash and newline}} *\
/
int bar /* expected-error {{expected ';' after top level declarator}} */
/* xyz
next comment ends with a trigraph escaped newline: */
/* expected-warning {{escaped newline between}} expected-warning {{backslash and newline separated by space}} expected-warning {{trigraph ends block comment}} *??/
/
foo
// rdar://6060752 - We should not get warnings about trigraphs in comments:
// '????'
/* ???? */
|
// ... existing code ...
/*
RUN: %clang_cc1 -E -trigraphs %s | grep bar
RUN: %clang_cc1 -E -trigraphs %s | grep foo
RUN: %clang_cc1 -E -trigraphs %s | not grep qux
RUN: %clang_cc1 -E -trigraphs %s | not grep xyz
RUN: %clang_cc1 -fsyntax-only -trigraphs -verify %s
*/
// ... modified code ...
// This is a simple comment, /*/ does not end a comment, the trailing */ does.
int i = /*/ */ 1;
/* qux
next comment ends with normal escaped newline:
*/
...
// rdar://6060752 - We should not get warnings about trigraphs in comments:
// '????'
/* ???? */
// ... rest of the code ...
|
c0595b7f31ef5df5246a565c8bb6bcd9d1a7fe5b
|
core/src/main/java/com/yandex/yoctodb/query/simple/SimpleNotCondition.java
|
core/src/main/java/com/yandex/yoctodb/query/simple/SimpleNotCondition.java
|
/*
* (C) YANDEX LLC, 2014-2015
*
* The Source Code called "YoctoDB" available at
* https://bitbucket.org/yandex/yoctodb is subject to the terms of the
* Mozilla Public License, v. 2.0 (hereinafter referred to as the "License").
*
* A copy of the License is also available at http://mozilla.org/MPL/2.0/.
*/
package com.yandex.yoctodb.query.simple;
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.query.TermCondition;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
/**
* Condition negation
*
* @author incubos
*/
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final TermCondition delegate;
public SimpleNotCondition(
@NotNull
final TermCondition delegate) {
this.delegate = delegate;
}
@Override
public boolean set(
@NotNull
final QueryContext ctx,
@NotNull
final BitSet to) {
delegate.set(ctx, to);
return to.inverse();
}
}
|
/*
* (C) YANDEX LLC, 2014-2015
*
* The Source Code called "YoctoDB" available at
* https://bitbucket.org/yandex/yoctodb is subject to the terms of the
* Mozilla Public License, v. 2.0 (hereinafter referred to as the "License").
*
* A copy of the License is also available at http://mozilla.org/MPL/2.0/.
*/
package com.yandex.yoctodb.query.simple;
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
/**
* Condition negation
*
* @author incubos
*/
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final Condition delegate;
public SimpleNotCondition(
@NotNull
final Condition delegate) {
this.delegate = delegate;
}
@Override
public boolean set(
@NotNull
final QueryContext ctx,
@NotNull
final BitSet to) {
delegate.set(ctx, to);
return to.inverse();
}
}
|
Allow to use not with general conditions
|
Allow to use not with general conditions
|
Java
|
mpl-2.0
|
incubos/yoctodb
|
java
|
## Code Before:
/*
* (C) YANDEX LLC, 2014-2015
*
* The Source Code called "YoctoDB" available at
* https://bitbucket.org/yandex/yoctodb is subject to the terms of the
* Mozilla Public License, v. 2.0 (hereinafter referred to as the "License").
*
* A copy of the License is also available at http://mozilla.org/MPL/2.0/.
*/
package com.yandex.yoctodb.query.simple;
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.query.TermCondition;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
/**
* Condition negation
*
* @author incubos
*/
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final TermCondition delegate;
public SimpleNotCondition(
@NotNull
final TermCondition delegate) {
this.delegate = delegate;
}
@Override
public boolean set(
@NotNull
final QueryContext ctx,
@NotNull
final BitSet to) {
delegate.set(ctx, to);
return to.inverse();
}
}
## Instruction:
Allow to use not with general conditions
## Code After:
/*
* (C) YANDEX LLC, 2014-2015
*
* The Source Code called "YoctoDB" available at
* https://bitbucket.org/yandex/yoctodb is subject to the terms of the
* Mozilla Public License, v. 2.0 (hereinafter referred to as the "License").
*
* A copy of the License is also available at http://mozilla.org/MPL/2.0/.
*/
package com.yandex.yoctodb.query.simple;
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
/**
* Condition negation
*
* @author incubos
*/
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final Condition delegate;
public SimpleNotCondition(
@NotNull
final Condition delegate) {
this.delegate = delegate;
}
@Override
public boolean set(
@NotNull
final QueryContext ctx,
@NotNull
final BitSet to) {
delegate.set(ctx, to);
return to.inverse();
}
}
|
// ... existing code ...
import com.yandex.yoctodb.query.Condition;
import com.yandex.yoctodb.query.QueryContext;
import com.yandex.yoctodb.util.mutable.BitSet;
import net.jcip.annotations.Immutable;
import org.jetbrains.annotations.NotNull;
// ... modified code ...
@Immutable
public final class SimpleNotCondition implements Condition {
@NotNull
private final Condition delegate;
public SimpleNotCondition(
@NotNull
final Condition delegate) {
this.delegate = delegate;
}
// ... rest of the code ...
|
4545c54ecd4b9cbc13033008c78c403da996f990
|
CCKit/CCLoadingController.h
|
CCKit/CCLoadingController.h
|
//
// CCLoadingController.h
// CCKit
//
// Created by Leonardo Lobato on 3/15/13.
// Copyright (c) 2013 Cliq Consulting. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol CCLoadingControllerDelegate;
@interface CCLoadingController : NSObject
@property (nonatomic, readonly) UIView *loadingView;
@property (nonatomic, weak) id<CCLoadingControllerDelegate> delegate;
- (void)showLoadingView:(BOOL)show animated:(BOOL)animated;
@end
@protocol CCLoadingControllerDelegate <NSObject>
- (UIView *)parentViewForLoadingController:(CCLoadingController *)controller;
@optional;
- (UIView *)loadingControllerShouldBeDisplayedBelowView:(CCLoadingController *)controller;
- (NSString *)titleForLoadingViewOnLoadingController:(CCLoadingController *)controller;
@end
|
//
// CCLoadingController.h
// CCKit
//
// Created by Leonardo Lobato on 3/15/13.
// Copyright (c) 2013 Cliq Consulting. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol CCLoadingControllerDelegate;
@interface CCLoadingController : NSObject
@property (nonatomic, readonly) UIView *loadingView;
#if __has_feature(objc_arc)
@property (nonatomic, weak) id<CCLoadingControllerDelegate> delegate;
#else
@property (nonatomic, assign) id<CCLoadingControllerDelegate> delegate;
#endif
- (void)showLoadingView:(BOOL)show animated:(BOOL)animated;
@end
@protocol CCLoadingControllerDelegate <NSObject>
- (UIView *)parentViewForLoadingController:(CCLoadingController *)controller;
@optional;
- (UIView *)loadingControllerShouldBeDisplayedBelowView:(CCLoadingController *)controller;
- (NSString *)titleForLoadingViewOnLoadingController:(CCLoadingController *)controller;
@end
|
Fix build for non-arc projects
|
Fix build for non-arc projects
Does **not** add no-arc support. Project will leak.
|
C
|
mit
|
cliq/CCKit
|
c
|
## Code Before:
//
// CCLoadingController.h
// CCKit
//
// Created by Leonardo Lobato on 3/15/13.
// Copyright (c) 2013 Cliq Consulting. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol CCLoadingControllerDelegate;
@interface CCLoadingController : NSObject
@property (nonatomic, readonly) UIView *loadingView;
@property (nonatomic, weak) id<CCLoadingControllerDelegate> delegate;
- (void)showLoadingView:(BOOL)show animated:(BOOL)animated;
@end
@protocol CCLoadingControllerDelegate <NSObject>
- (UIView *)parentViewForLoadingController:(CCLoadingController *)controller;
@optional;
- (UIView *)loadingControllerShouldBeDisplayedBelowView:(CCLoadingController *)controller;
- (NSString *)titleForLoadingViewOnLoadingController:(CCLoadingController *)controller;
@end
## Instruction:
Fix build for non-arc projects
Does **not** add no-arc support. Project will leak.
## Code After:
//
// CCLoadingController.h
// CCKit
//
// Created by Leonardo Lobato on 3/15/13.
// Copyright (c) 2013 Cliq Consulting. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol CCLoadingControllerDelegate;
@interface CCLoadingController : NSObject
@property (nonatomic, readonly) UIView *loadingView;
#if __has_feature(objc_arc)
@property (nonatomic, weak) id<CCLoadingControllerDelegate> delegate;
#else
@property (nonatomic, assign) id<CCLoadingControllerDelegate> delegate;
#endif
- (void)showLoadingView:(BOOL)show animated:(BOOL)animated;
@end
@protocol CCLoadingControllerDelegate <NSObject>
- (UIView *)parentViewForLoadingController:(CCLoadingController *)controller;
@optional;
- (UIView *)loadingControllerShouldBeDisplayedBelowView:(CCLoadingController *)controller;
- (NSString *)titleForLoadingViewOnLoadingController:(CCLoadingController *)controller;
@end
|
# ... existing code ...
@interface CCLoadingController : NSObject
@property (nonatomic, readonly) UIView *loadingView;
#if __has_feature(objc_arc)
@property (nonatomic, weak) id<CCLoadingControllerDelegate> delegate;
#else
@property (nonatomic, assign) id<CCLoadingControllerDelegate> delegate;
#endif
- (void)showLoadingView:(BOOL)show animated:(BOOL)animated;
# ... rest of the code ...
|
194748bfbc67741275fd36eb2eaafbde55caeabb
|
django_emarsys/management/commands/emarsys_sync_events.py
|
django_emarsys/management/commands/emarsys_sync_events.py
|
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
|
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
|
Fix issue with management command log output and non ascii event names
|
Fix issue with management command log output and non ascii event names
|
Python
|
mit
|
machtfit/django-emarsys,machtfit/django-emarsys
|
python
|
## Code Before:
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
## Instruction:
Fix issue with management command log output and non ascii event names
## Code After:
from __future__ import unicode_literals
from django.core.management import BaseCommand
from ...event import sync_events
class Command(BaseCommand):
def handle(self, *args, **options):
num_new_events, num_updated_ids, num_deleted_ids, \
unsynced_event_names = sync_events()
print("{} new events, {} event ids updated,"
" {} event ids deleted"
.format(num_new_events, num_updated_ids, num_deleted_ids))
if unsynced_event_names:
print("unsynced event names:\n {}"
.format('\n '.join(unsynced_event_names)))
|
...
from __future__ import unicode_literals
from django.core.management import BaseCommand
...
|
0aa1fb5d7f4eca6423a7d4b5cdd166bf29f48423
|
ordering/__init__.py
|
ordering/__init__.py
|
from fractions import Fraction
class Ordering:
_start = object()
_end = object()
def __init__(self):
self._labels = {
self._start: Fraction(0),
self._end: Fraction(1)
}
self._successors = {
self._start: self._end
}
self._predecessors = {
self._end: self._start
}
def insert_after(self, existing_item, new_item):
self._labels[new_item] = (self._labels[existing_item] + self._labels[self._successors[existing_item]]) / 2
self._successors[new_item] = self._successors[existing_item]
self._predecessors[new_item] = existing_item
self._predecessors[self._successors[existing_item]] = new_item
self._successors[existing_item] = new_item
def insert_before(self, existing_item, new_item):
self.insert_after(self._predecessors[existing_item], new_item)
def insert_start(self, new_item):
self.insert_after(self._start, new_item)
def insert_end(self, new_item):
self.insert_before(self._end, new_item)
def compare(self, left_item, right_item):
return self._labels[left_item] < self._labels[right_item]
|
from fractions import Fraction
from functools import total_ordering
class Ordering:
_start = object()
_end = object()
def __init__(self):
self._labels = {
self._start: Fraction(0),
self._end: Fraction(1)
}
self._successors = {
self._start: self._end
}
self._predecessors = {
self._end: self._start
}
def insert_after(self, existing_item, new_item):
self._labels[new_item] = (self._labels[existing_item] + self._labels[self._successors[existing_item]]) / 2
self._successors[new_item] = self._successors[existing_item]
self._predecessors[new_item] = existing_item
self._predecessors[self._successors[existing_item]] = new_item
self._successors[existing_item] = new_item
return OrderingItem(self, new_item)
def insert_before(self, existing_item, new_item):
return self.insert_after(self._predecessors[existing_item], new_item)
def insert_start(self, new_item):
return self.insert_after(self._start, new_item)
def insert_end(self, new_item):
return self.insert_before(self._end, new_item)
def compare(self, left_item, right_item):
return self._labels[left_item] < self._labels[right_item]
@total_ordering
class OrderingItem:
def __init__(self, ordering, item):
self.ordering = ordering
self.item = item
def __lt__(self, other):
return self.ordering.compare(self.item, other.item)
|
Add class representing an element in the ordering
|
Add class representing an element in the ordering
|
Python
|
mit
|
madman-bob/python-order-maintenance
|
python
|
## Code Before:
from fractions import Fraction
class Ordering:
_start = object()
_end = object()
def __init__(self):
self._labels = {
self._start: Fraction(0),
self._end: Fraction(1)
}
self._successors = {
self._start: self._end
}
self._predecessors = {
self._end: self._start
}
def insert_after(self, existing_item, new_item):
self._labels[new_item] = (self._labels[existing_item] + self._labels[self._successors[existing_item]]) / 2
self._successors[new_item] = self._successors[existing_item]
self._predecessors[new_item] = existing_item
self._predecessors[self._successors[existing_item]] = new_item
self._successors[existing_item] = new_item
def insert_before(self, existing_item, new_item):
self.insert_after(self._predecessors[existing_item], new_item)
def insert_start(self, new_item):
self.insert_after(self._start, new_item)
def insert_end(self, new_item):
self.insert_before(self._end, new_item)
def compare(self, left_item, right_item):
return self._labels[left_item] < self._labels[right_item]
## Instruction:
Add class representing an element in the ordering
## Code After:
from fractions import Fraction
from functools import total_ordering
class Ordering:
_start = object()
_end = object()
def __init__(self):
self._labels = {
self._start: Fraction(0),
self._end: Fraction(1)
}
self._successors = {
self._start: self._end
}
self._predecessors = {
self._end: self._start
}
def insert_after(self, existing_item, new_item):
self._labels[new_item] = (self._labels[existing_item] + self._labels[self._successors[existing_item]]) / 2
self._successors[new_item] = self._successors[existing_item]
self._predecessors[new_item] = existing_item
self._predecessors[self._successors[existing_item]] = new_item
self._successors[existing_item] = new_item
return OrderingItem(self, new_item)
def insert_before(self, existing_item, new_item):
return self.insert_after(self._predecessors[existing_item], new_item)
def insert_start(self, new_item):
return self.insert_after(self._start, new_item)
def insert_end(self, new_item):
return self.insert_before(self._end, new_item)
def compare(self, left_item, right_item):
return self._labels[left_item] < self._labels[right_item]
@total_ordering
class OrderingItem:
def __init__(self, ordering, item):
self.ordering = ordering
self.item = item
def __lt__(self, other):
return self.ordering.compare(self.item, other.item)
|
// ... existing code ...
from fractions import Fraction
from functools import total_ordering
class Ordering:
// ... modified code ...
self._predecessors[self._successors[existing_item]] = new_item
self._successors[existing_item] = new_item
return OrderingItem(self, new_item)
def insert_before(self, existing_item, new_item):
return self.insert_after(self._predecessors[existing_item], new_item)
def insert_start(self, new_item):
return self.insert_after(self._start, new_item)
def insert_end(self, new_item):
return self.insert_before(self._end, new_item)
def compare(self, left_item, right_item):
return self._labels[left_item] < self._labels[right_item]
@total_ordering
class OrderingItem:
def __init__(self, ordering, item):
self.ordering = ordering
self.item = item
def __lt__(self, other):
return self.ordering.compare(self.item, other.item)
// ... rest of the code ...
|
834b7ff81d6e2777d3952bb588a53f12f5ace5f5
|
setup.py
|
setup.py
|
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
|
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
Add a Python 3 classifier recommended by community
|
Add a Python 3 classifier recommended by community
|
Python
|
mit
|
rfk/regobj
|
python
|
## Code Before:
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
## Instruction:
Add a Python 3 classifier recommended by community
## Code After:
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "[email protected]"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
# ... existing code ...
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
# ... rest of the code ...
|
fb41f01360423d176864a4846d0e769d4df03978
|
penchy/tests/test_compat.py
|
penchy/tests/test_compat.py
|
from hashlib import sha1
from tempfile import TemporaryFile
from contextlib import contextmanager
from penchy.compat import unittest, nested, update_hasher
class NestedTest(unittest.TestCase):
def test_reraising_exception(self):
e = Exception('reraise this')
with self.assertRaises(Exception) as raised:
with nested(TemporaryFile(), TemporaryFile()) as (a, b):
raise e
self.assertEqual(raised.exception, e)
def test_raising_on_exit(self):
@contextmanager
def raising_cm(exception):
yield
raise exception
on_exit = Exception('throw on exit')
with self.assertRaises(Exception) as raised:
with nested(raising_cm(on_exit)):
pass
self.assertEqual(raised.exception, on_exit)
class HasherTest(unittest.TestCase):
def setUp(self):
self.control = sha1()
self.h = sha1()
def test_str_hash(self):
s = 'foo'
self.control.update(s)
update_hasher(self.h, s)
self.assertEqual(self.control.hexdigest(),
self.h.hexdigest())
def test_unicode_hash(self):
u = u'foo'
self.control.update(u.encode('utf8'))
update_hasher(self.h, u)
self.assertEqual(self.control.hexdigest(),
self.h.hexdigest())
|
from hashlib import sha1
from tempfile import TemporaryFile
from contextlib import contextmanager
from penchy.compat import unittest, nested, update_hasher, unicode_
class NestedTest(unittest.TestCase):
def test_reraising_exception(self):
e = Exception('reraise this')
with self.assertRaises(Exception) as raised:
with nested(TemporaryFile(), TemporaryFile()) as (a, b):
raise e
self.assertEqual(raised.exception, e)
def test_raising_on_exit(self):
@contextmanager
def raising_cm(exception):
yield
raise exception
on_exit = Exception('throw on exit')
with self.assertRaises(Exception) as raised:
with nested(raising_cm(on_exit)):
pass
self.assertEqual(raised.exception, on_exit)
class HasherTest(unittest.TestCase):
def setUp(self):
self.control = sha1()
self.h = sha1()
def test_str_hash(self):
s = str('foo')
update_hasher(self.h, s)
self.assertEqual(self.h.hexdigest(),
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
def test_unicode_hash(self):
u = unicode_('foo')
update_hasher(self.h, u)
self.assertEqual(self.h.hexdigest(),
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
|
Replace control hasher with constant hexdigest.
|
tests: Replace control hasher with constant hexdigest.
Signed-off-by: Michael Markert <[email protected]>
|
Python
|
mit
|
fhirschmann/penchy,fhirschmann/penchy
|
python
|
## Code Before:
from hashlib import sha1
from tempfile import TemporaryFile
from contextlib import contextmanager
from penchy.compat import unittest, nested, update_hasher
class NestedTest(unittest.TestCase):
def test_reraising_exception(self):
e = Exception('reraise this')
with self.assertRaises(Exception) as raised:
with nested(TemporaryFile(), TemporaryFile()) as (a, b):
raise e
self.assertEqual(raised.exception, e)
def test_raising_on_exit(self):
@contextmanager
def raising_cm(exception):
yield
raise exception
on_exit = Exception('throw on exit')
with self.assertRaises(Exception) as raised:
with nested(raising_cm(on_exit)):
pass
self.assertEqual(raised.exception, on_exit)
class HasherTest(unittest.TestCase):
def setUp(self):
self.control = sha1()
self.h = sha1()
def test_str_hash(self):
s = 'foo'
self.control.update(s)
update_hasher(self.h, s)
self.assertEqual(self.control.hexdigest(),
self.h.hexdigest())
def test_unicode_hash(self):
u = u'foo'
self.control.update(u.encode('utf8'))
update_hasher(self.h, u)
self.assertEqual(self.control.hexdigest(),
self.h.hexdigest())
## Instruction:
tests: Replace control hasher with constant hexdigest.
Signed-off-by: Michael Markert <[email protected]>
## Code After:
from hashlib import sha1
from tempfile import TemporaryFile
from contextlib import contextmanager
from penchy.compat import unittest, nested, update_hasher, unicode_
class NestedTest(unittest.TestCase):
def test_reraising_exception(self):
e = Exception('reraise this')
with self.assertRaises(Exception) as raised:
with nested(TemporaryFile(), TemporaryFile()) as (a, b):
raise e
self.assertEqual(raised.exception, e)
def test_raising_on_exit(self):
@contextmanager
def raising_cm(exception):
yield
raise exception
on_exit = Exception('throw on exit')
with self.assertRaises(Exception) as raised:
with nested(raising_cm(on_exit)):
pass
self.assertEqual(raised.exception, on_exit)
class HasherTest(unittest.TestCase):
def setUp(self):
self.control = sha1()
self.h = sha1()
def test_str_hash(self):
s = str('foo')
update_hasher(self.h, s)
self.assertEqual(self.h.hexdigest(),
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
def test_unicode_hash(self):
u = unicode_('foo')
update_hasher(self.h, u)
self.assertEqual(self.h.hexdigest(),
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
|
// ... existing code ...
from tempfile import TemporaryFile
from contextlib import contextmanager
from penchy.compat import unittest, nested, update_hasher, unicode_
class NestedTest(unittest.TestCase):
// ... modified code ...
self.h = sha1()
def test_str_hash(self):
s = str('foo')
update_hasher(self.h, s)
self.assertEqual(self.h.hexdigest(),
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
def test_unicode_hash(self):
u = unicode_('foo')
update_hasher(self.h, u)
self.assertEqual(self.h.hexdigest(),
'0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
// ... rest of the code ...
|
9467cfc4fa3f0bd2c269f3d7b61460ddc6851f9f
|
tests/test_dfw_uncomparables.py
|
tests/test_dfw_uncomparables.py
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
|
Add test for exception to uncomparable check
|
Add test for exception to uncomparable check
|
Python
|
bsd-3-clause
|
jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
python
|
## Code Before:
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
## Instruction:
Add test for exception to uncomparable check
## Code After:
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
|
# ... existing code ...
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
# ... rest of the code ...
|
f3bd9e6b517533be5c43e106e5d5eb707653b893
|
testsuite/integration/clustering/src/test/java/org/jboss/as/test/clustering/twoclusters/bean/forwarding/ForwardingStatefulSBImpl.java
|
testsuite/integration/clustering/src/test/java/org/jboss/as/test/clustering/twoclusters/bean/forwarding/ForwardingStatefulSBImpl.java
|
package org.jboss.as.test.clustering.twoclusters.bean.forwarding;
import org.jboss.as.test.clustering.twoclusters.bean.stateful.RemoteStatefulSB;
import org.jboss.ejb3.annotation.Clustered;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
@Stateful
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
}
|
package org.jboss.as.test.clustering.twoclusters.bean.forwarding;
import org.jboss.as.test.clustering.twoclusters.bean.stateful.RemoteStatefulSB;
import org.jboss.ejb3.annotation.Clustered;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
@Stateful
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
// we need to override these methods so that the TransactionAttribute gets processed on this class!
@Override
public int getSerial()
{
return super.getSerial();
}
@Override
public int getSerialAndIncrement()
{
return super.getSerialAndIncrement();
}
@Override
public byte[] getCargo()
{
return super.getCargo();
}
}
|
Add in overrides for methods otherwise TransactionAttribute changes don't get applied.
|
[WFLY-4853] Add in overrides for methods otherwise TransactionAttribute changes don't get applied.
|
Java
|
lgpl-2.1
|
jstourac/wildfly,wildfly/wildfly,pferraro/wildfly,tadamski/wildfly,99sono/wildfly,iweiss/wildfly,pferraro/wildfly,99sono/wildfly,iweiss/wildfly,jstourac/wildfly,xasx/wildfly,iweiss/wildfly,tadamski/wildfly,99sono/wildfly,rhusar/wildfly,tadamski/wildfly,jstourac/wildfly,tomazzupan/wildfly,tomazzupan/wildfly,golovnin/wildfly,wildfly/wildfly,wildfly/wildfly,xasx/wildfly,jstourac/wildfly,rhusar/wildfly,golovnin/wildfly,wildfly/wildfly,golovnin/wildfly,pferraro/wildfly,tomazzupan/wildfly,iweiss/wildfly,rhusar/wildfly,rhusar/wildfly,pferraro/wildfly,xasx/wildfly
|
java
|
## Code Before:
package org.jboss.as.test.clustering.twoclusters.bean.forwarding;
import org.jboss.as.test.clustering.twoclusters.bean.stateful.RemoteStatefulSB;
import org.jboss.ejb3.annotation.Clustered;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
@Stateful
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
}
## Instruction:
[WFLY-4853] Add in overrides for methods otherwise TransactionAttribute changes don't get applied.
## Code After:
package org.jboss.as.test.clustering.twoclusters.bean.forwarding;
import org.jboss.as.test.clustering.twoclusters.bean.stateful.RemoteStatefulSB;
import org.jboss.ejb3.annotation.Clustered;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
@Stateful
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
// we need to override these methods so that the TransactionAttribute gets processed on this class!
@Override
public int getSerial()
{
return super.getSerial();
}
@Override
public int getSerialAndIncrement()
{
return super.getSerialAndIncrement();
}
@Override
public byte[] getCargo()
{
return super.getCargo();
}
}
|
# ... existing code ...
@Clustered
@TransactionAttribute(TransactionAttributeType.REQUIRED) // this is the default anyway
public class ForwardingStatefulSBImpl extends AbstractForwardingStatefulSBImpl implements RemoteStatefulSB {
// we need to override these methods so that the TransactionAttribute gets processed on this class!
@Override
public int getSerial()
{
return super.getSerial();
}
@Override
public int getSerialAndIncrement()
{
return super.getSerialAndIncrement();
}
@Override
public byte[] getCargo()
{
return super.getCargo();
}
}
# ... rest of the code ...
|
9f216f1fdde41730b2680eed2174b1ba75d923be
|
dataset/dataset/spiders/dataset_spider.py
|
dataset/dataset/spiders/dataset_spider.py
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
pages = 9466
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = []
for i in range(1, pages + 1):
start_urls.append('http://data.gc.ca/data/en/dataset?page=' + str(i))
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
Add to start urls to contain all dataset pages
|
Add to start urls to contain all dataset pages
|
Python
|
mit
|
MaxLikelihood/CODE
|
python
|
## Code Before:
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
## Instruction:
Add to start urls to contain all dataset pages
## Code After:
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
pages = 9466
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = []
for i in range(1, pages + 1):
start_urls.append('http://data.gc.ca/data/en/dataset?page=' + str(i))
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
# ... existing code ...
class DatasetSpider(CrawlSpider):
pages = 9466
name = 'dataset'
allowed_domains = ['data.gc.ca']
start_urls = []
for i in range(1, pages + 1):
start_urls.append('http://data.gc.ca/data/en/dataset?page=' + str(i))
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
# ... rest of the code ...
|
9ffc56e947dea40cd49c76beada2ec469a01f8f8
|
__init__.py
|
__init__.py
|
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
|
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
|
Make api file path OS safe
|
Make api file path OS safe
|
Python
|
mit
|
joshuamsmith/ConnectPyse
|
python
|
## Code Before:
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
## Instruction:
Make api file path OS safe
## Code After:
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
|
...
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
...
|
b3850c475e449c0c6182629aa7521f335e86b1e1
|
scrapy_local.py
|
scrapy_local.py
|
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
|
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
Fix issue with scrapy local settings
|
Fix issue with scrapy local settings
|
Python
|
mit
|
comsaint/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,comsaint/legco-watch
|
python
|
## Code Before:
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
## Instruction:
Fix issue with scrapy local settings
## Code After:
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
# ... existing code ...
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# ... rest of the code ...
|
88877290116f8da110404ff96a3df8f508bc60a2
|
src/calculator/fourier/fourier_transform_fftw.h
|
src/calculator/fourier/fourier_transform_fftw.h
|
namespace bart {
namespace calculator {
namespace fourier {
class FourierTransformFFTW : public FourierTransformI {
public:
FourierTransformFFTW(const int n_samples)
: n_samples_(n_samples) {};
int n_samples() const { return n_samples_; }
private:
const int n_samples_{0};
};
} // namespace fourier
} // namespace calculator
} // namespace bart
#endif //BART_SRC_CALCULATOR_FOURIER_FOURIER_TRANSFORM_FFTW_H_
|
namespace bart {
namespace calculator {
namespace fourier {
class FourierTransformFFTW : public FourierTransformI {
public:
FourierTransformFFTW(const int n_samples);
int n_samples() const { return n_samples_; }
private:
const int n_samples_;
};
} // namespace fourier
} // namespace calculator
} // namespace bart
#endif //BART_SRC_CALCULATOR_FOURIER_FOURIER_TRANSFORM_FFTW_H_
|
Put constructor for FourierTransformFFTW in cc file.
|
Put constructor for FourierTransformFFTW in cc file.
|
C
|
mit
|
jsrehak/BART,jsrehak/BART
|
c
|
## Code Before:
namespace bart {
namespace calculator {
namespace fourier {
class FourierTransformFFTW : public FourierTransformI {
public:
FourierTransformFFTW(const int n_samples)
: n_samples_(n_samples) {};
int n_samples() const { return n_samples_; }
private:
const int n_samples_{0};
};
} // namespace fourier
} // namespace calculator
} // namespace bart
#endif //BART_SRC_CALCULATOR_FOURIER_FOURIER_TRANSFORM_FFTW_H_
## Instruction:
Put constructor for FourierTransformFFTW in cc file.
## Code After:
namespace bart {
namespace calculator {
namespace fourier {
class FourierTransformFFTW : public FourierTransformI {
public:
FourierTransformFFTW(const int n_samples);
int n_samples() const { return n_samples_; }
private:
const int n_samples_;
};
} // namespace fourier
} // namespace calculator
} // namespace bart
#endif //BART_SRC_CALCULATOR_FOURIER_FOURIER_TRANSFORM_FFTW_H_
|
# ... existing code ...
class FourierTransformFFTW : public FourierTransformI {
public:
FourierTransformFFTW(const int n_samples);
int n_samples() const { return n_samples_; }
private:
const int n_samples_;
};
} // namespace fourier
# ... rest of the code ...
|
aef60d17607a0819e24a2a61304bd5ca38289d50
|
scripts/slave/dart/dart_util.py
|
scripts/slave/dart/dart_util.py
|
import optparse
import os
import sys
from common import chromium_utils
def clobber():
print('Clobbereing platform: %s' % sys.platform)
if sys.platform in ('win32'):
release_dir = os.path.abspath('ReleaseIA32')
print('Removing directory %s' % release_dir)
chromium_utils.RemoveDirectory(release_dir)
debug_dir = os.path.abspath('DebugIA32')
print('Removing directory %s' % debug_dir)
chromium_utils.RemoveDirectory(debug_dir)
elif sys.platform in ('linux2'):
out_dir = os.path.abspath('out')
print('Removing directory %s' % out_dir)
chromium_utils.RemoveDirectory(out_dir)
elif sys.platform.startswith('darwin'):
xcode_dir = os.path.abspath('xcodebuild')
print('Removing directory %s' % xcode_dir)
chromium_utils.RemoveDirectory(xcode_dir)
else:
print("Platform not recognized")
return 0
def main():
parser = optparse.OptionParser()
parser.add_option('',
'--clobber',
default=False,
action='store_true',
help='Clobber the builder')
options, args = parser.parse_args()
# args unused, use.
args.append('')
# Determine what to do based on options passed in.
if options.clobber:
return clobber()
else:
print("Nothing to do")
if '__main__' == __name__ :
sys.exit(main())
|
import optparse
import subprocess
import sys
def clobber():
cmd = [sys.executable,
'./tools/clean_output_directory.py',
'--mode=all']
print 'Clobbering %s' % (' '.join(cmd))
return subprocess.call(cmd)
def main():
parser = optparse.OptionParser()
parser.add_option('',
'--clobber',
default=False,
action='store_true',
help='Clobber the builder')
options, args = parser.parse_args()
# args unused, use.
args.append('')
# Determine what to do based on options passed in.
if options.clobber:
return clobber()
else:
print("Nothing to do")
if '__main__' == __name__ :
sys.exit(main())
|
Use the new tools/clean_output_directory.py script for clobbering builders.
|
Use the new tools/clean_output_directory.py script for clobbering builders.
This will unify our clobbering functionality across builders that use annotated steps and builders with test setup in the buildbot source.
TBR=foo
Review URL: https://chromiumcodereview.appspot.com/10834305
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@151464 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
python
|
## Code Before:
import optparse
import os
import sys
from common import chromium_utils
def clobber():
print('Clobbereing platform: %s' % sys.platform)
if sys.platform in ('win32'):
release_dir = os.path.abspath('ReleaseIA32')
print('Removing directory %s' % release_dir)
chromium_utils.RemoveDirectory(release_dir)
debug_dir = os.path.abspath('DebugIA32')
print('Removing directory %s' % debug_dir)
chromium_utils.RemoveDirectory(debug_dir)
elif sys.platform in ('linux2'):
out_dir = os.path.abspath('out')
print('Removing directory %s' % out_dir)
chromium_utils.RemoveDirectory(out_dir)
elif sys.platform.startswith('darwin'):
xcode_dir = os.path.abspath('xcodebuild')
print('Removing directory %s' % xcode_dir)
chromium_utils.RemoveDirectory(xcode_dir)
else:
print("Platform not recognized")
return 0
def main():
parser = optparse.OptionParser()
parser.add_option('',
'--clobber',
default=False,
action='store_true',
help='Clobber the builder')
options, args = parser.parse_args()
# args unused, use.
args.append('')
# Determine what to do based on options passed in.
if options.clobber:
return clobber()
else:
print("Nothing to do")
if '__main__' == __name__ :
sys.exit(main())
## Instruction:
Use the new tools/clean_output_directory.py script for clobbering builders.
This will unify our clobbering functionality across builders that use annotated steps and builders with test setup in the buildbot source.
TBR=foo
Review URL: https://chromiumcodereview.appspot.com/10834305
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@151464 0039d316-1c4b-4281-b951-d872f2087c98
## Code After:
import optparse
import subprocess
import sys
def clobber():
cmd = [sys.executable,
'./tools/clean_output_directory.py',
'--mode=all']
print 'Clobbering %s' % (' '.join(cmd))
return subprocess.call(cmd)
def main():
parser = optparse.OptionParser()
parser.add_option('',
'--clobber',
default=False,
action='store_true',
help='Clobber the builder')
options, args = parser.parse_args()
# args unused, use.
args.append('')
# Determine what to do based on options passed in.
if options.clobber:
return clobber()
else:
print("Nothing to do")
if '__main__' == __name__ :
sys.exit(main())
|
# ... existing code ...
import optparse
import subprocess
import sys
def clobber():
cmd = [sys.executable,
'./tools/clean_output_directory.py',
'--mode=all']
print 'Clobbering %s' % (' '.join(cmd))
return subprocess.call(cmd)
def main():
parser = optparse.OptionParser()
# ... rest of the code ...
|
ea7177614dc2094e95aeea33f6249f14c792fee8
|
Discord/modules/ciphers.py
|
Discord/modules/ciphers.py
|
def encode_caesar(message, key):
encoded_message = ""
for character in message:
if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ?
encoded_message += character
continue
shifted = ord(character) + int(key)
if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'):
encoded_message += chr(shifted - 26)
else:
encoded_message += chr(shifted)
return encoded_message
def decode_caesar(message, key):
decoded_message = ""
for character in message:
if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ?
decoded_message += character
continue
shifted = ord(character) - int(key)
if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'):
decoded_message += chr(shifted + 26)
else:
decoded_message += chr(shifted)
return decoded_message
def brute_force_caesar(message):
decodes = ""
for key in range(26):
decodes += str(key) + ": " + decode_caesar(message, key) + '\n'
return decodes
|
def encode_caesar(message, key):
encoded_message = ""
for character in message:
if not character.isalpha() or not character.isascii():
encoded_message += character
continue
shifted = ord(character) + int(key)
if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'):
encoded_message += chr(shifted - 26)
else:
encoded_message += chr(shifted)
return encoded_message
def decode_caesar(message, key):
decoded_message = ""
for character in message:
if not character.isalpha() or not character.isascii():
decoded_message += character
continue
shifted = ord(character) - int(key)
if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'):
decoded_message += chr(shifted + 26)
else:
decoded_message += chr(shifted)
return decoded_message
def brute_force_caesar(message):
decodes = ""
for key in range(26):
decodes += str(key) + ": " + decode_caesar(message, key) + '\n'
return decodes
|
Use string methods for encode and decode caesar functions
|
[Discord] Use string methods for encode and decode caesar functions
To determine (in)valid characters to encode and decode
|
Python
|
mit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
python
|
## Code Before:
def encode_caesar(message, key):
encoded_message = ""
for character in message:
if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ?
encoded_message += character
continue
shifted = ord(character) + int(key)
if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'):
encoded_message += chr(shifted - 26)
else:
encoded_message += chr(shifted)
return encoded_message
def decode_caesar(message, key):
decoded_message = ""
for character in message:
if not ('a' <= character <= 'z' or 'A' <= character <= 'Z'): # .isalpha() ?
decoded_message += character
continue
shifted = ord(character) - int(key)
if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'):
decoded_message += chr(shifted + 26)
else:
decoded_message += chr(shifted)
return decoded_message
def brute_force_caesar(message):
decodes = ""
for key in range(26):
decodes += str(key) + ": " + decode_caesar(message, key) + '\n'
return decodes
## Instruction:
[Discord] Use string methods for encode and decode caesar functions
To determine (in)valid characters to encode and decode
## Code After:
def encode_caesar(message, key):
encoded_message = ""
for character in message:
if not character.isalpha() or not character.isascii():
encoded_message += character
continue
shifted = ord(character) + int(key)
if character.islower() and shifted > ord('z') or character.isupper() and shifted > ord('Z'):
encoded_message += chr(shifted - 26)
else:
encoded_message += chr(shifted)
return encoded_message
def decode_caesar(message, key):
decoded_message = ""
for character in message:
if not character.isalpha() or not character.isascii():
decoded_message += character
continue
shifted = ord(character) - int(key)
if character.islower() and shifted < ord('a') or character.isupper() and shifted < ord('A'):
decoded_message += chr(shifted + 26)
else:
decoded_message += chr(shifted)
return decoded_message
def brute_force_caesar(message):
decodes = ""
for key in range(26):
decodes += str(key) + ": " + decode_caesar(message, key) + '\n'
return decodes
|
// ... existing code ...
def encode_caesar(message, key):
encoded_message = ""
for character in message:
if not character.isalpha() or not character.isascii():
encoded_message += character
continue
shifted = ord(character) + int(key)
// ... modified code ...
def decode_caesar(message, key):
decoded_message = ""
for character in message:
if not character.isalpha() or not character.isascii():
decoded_message += character
continue
shifted = ord(character) - int(key)
// ... rest of the code ...
|
81d7c793585ca66dc5f726bf9013212986f66176
|
core/src/main/java/com/emc/ia/sdk/support/io/RepeatableInputStream.java
|
core/src/main/java/com/emc/ia/sdk/support/io/RepeatableInputStream.java
|
/*
* Copyright (c) 2016 EMC Corporation. All Rights Reserved.
*/
package com.emc.ia.sdk.support.io;
import java.io.IOException;
import java.io.InputStream;
import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
/**
 * Provide repeatable access to the same {@linkplain InputStream} by caching it in memory.
 * The entire source stream is read and buffered at construction time.
 */
public class RepeatableInputStream implements Supplier<InputStream> {

  private final ByteArrayInputOutputStream provider = new ByteArrayInputOutputStream();

  /**
   * Provide repeatable access to the given input stream.
   * @param source The input stream to make available for repeated access. Must not be <code>null</code>
   * @throws IOException When an I/O error occurs while caching the stream
   */
  public RepeatableInputStream(InputStream source) throws IOException {
    // Fail fast with a clear NullPointerException instead of letting
    // IOUtils.copy blow up deep inside the copy loop.
    IOUtils.copy(java.util.Objects.requireNonNull(source, "source"), provider);
  }

  @Override
  public InputStream get() {
    // Serves a stream over the bytes cached by the constructor.
    return provider.getInputStream();
  }

}
|
/*
* Copyright (c) 2016 EMC Corporation. All Rights Reserved.
*/
package com.emc.ia.sdk.support.io;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
/**
 * Provide repeatable access to the same {@linkplain InputStream} by caching it in memory.
 * The whole source stream is consumed and buffered at construction time, so this is
 * only suitable for streams that fit in memory.
 */
public class RepeatableInputStream implements Supplier<InputStream> {
// In-memory buffer that both receives the copied bytes and serves readers.
private final ByteArrayInputOutputStream provider = new ByteArrayInputOutputStream();
/**
 * Provide repeatable access to the given input stream.
 * @param source The input stream to make available for repeated access. Must not be <code>null</code>
 * @throws IOException When an I/O error occurs
 */
public RepeatableInputStream(InputStream source) throws IOException {
// requireNonNull fails fast rather than letting the copy loop throw later.
IOUtils.copy(Objects.requireNonNull(source), provider);
}
@Override
public InputStream get() {
// NOTE(review): presumably each call yields an independent stream over the
// cached bytes -- confirm against ByteArrayInputOutputStream's contract.
return provider.getInputStream();
}
}
|
Add check for null parameter
|
Add check for null parameter
|
Java
|
mpl-2.0
|
kovaloid/infoarchive-sip-sdk,Enterprise-Content-Management/infoarchive-sip-sdk
|
java
|
## Code Before:
/*
* Copyright (c) 2016 EMC Corporation. All Rights Reserved.
*/
package com.emc.ia.sdk.support.io;
import java.io.IOException;
import java.io.InputStream;
import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
/**
* Provide repeatable access to the same {@linkplain InputStream} by caching it in memory.
*/
public class RepeatableInputStream implements Supplier<InputStream> {
private final ByteArrayInputOutputStream provider = new ByteArrayInputOutputStream();
/**
* Provide repeatable access to the given input stream.
* @param source The input stream to make available for repeated access
* @throws IOException When an I/O error occurs
*/
public RepeatableInputStream(InputStream source) throws IOException {
IOUtils.copy(source, provider);
}
@Override
public InputStream get() {
return provider.getInputStream();
}
}
## Instruction:
Add check for null parameter
## Code After:
/*
* Copyright (c) 2016 EMC Corporation. All Rights Reserved.
*/
package com.emc.ia.sdk.support.io;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
/**
* Provide repeatable access to the same {@linkplain InputStream} by caching it in memory.
*/
public class RepeatableInputStream implements Supplier<InputStream> {
private final ByteArrayInputOutputStream provider = new ByteArrayInputOutputStream();
/**
* Provide repeatable access to the given input stream.
* @param source The input stream to make available for repeated access. Must not be <code>null</code>
* @throws IOException When an I/O error occurs
*/
public RepeatableInputStream(InputStream source) throws IOException {
IOUtils.copy(Objects.requireNonNull(source), provider);
}
@Override
public InputStream get() {
return provider.getInputStream();
}
}
|
// ... existing code ...
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.function.Supplier;
import org.apache.commons.io.IOUtils;
// ... modified code ...
/**
* Provide repeatable access to the given input stream.
* @param source The input stream to make available for repeated access. Must not be <code>null</code>
* @throws IOException When an I/O error occurs
*/
public RepeatableInputStream(InputStream source) throws IOException {
IOUtils.copy(Objects.requireNonNull(source), provider);
}
@Override
// ... rest of the code ...
|
4e6c901140a667612fd311a2f18e1b7c18bafcd0
|
tests/test_octadecane_benchmarks.py
|
tests/test_octadecane_benchmarks.py
|
""" This module runs the benchmark test suite. """
from .context import phaseflow
def test_lid_driven_cavity_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.LidDrivenCavityBenchmarkSimulation())
def test_lid_driven_cavity_benchmark_with_solid_subdomain__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.LDCBenchmarkSimulationWithSolidSubdomain())
def test_heat_driven_cavity_benchmark_with_restart__ci__():
    """Run a short simulation, restart it from its checkpoint, and verify that
    the restarted run picks up exactly where the first one left off."""
    sim = phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation()
    sim.prefix_output_dir_with_tempdir = True
    sim.end_time = 2.*sim.timestep_size
    sim.run(verify = False)
    sim2 = phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation()
    sim2.read_checkpoint(sim.latest_checkpoint_filepath)
    # The restarted simulation must resume from the first run's final state.
    assert(sim.state.time == sim2.old_state.time)
    assert(all(sim.state.solution.leaf_node().vector() == sim2.old_state.solution.leaf_node().vector()))
    # Bug fix: the tempdir flag must be set on sim2 (the run we are about to
    # start), not on sim again -- otherwise sim2 writes outside the tempdir.
    sim2.prefix_output_dir_with_tempdir = True
    sim2.run(verify = True)
def test_stefan_problem_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation())
|
""" This module runs the benchmark test suite. """
from .context import phaseflow
def test_lid_driven_cavity_benchmark_with_solid_subdomain__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.LDCBenchmarkSimulationWithSolidSubdomain())
def test_heat_driven_cavity_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation())
def test_stefan_problem_benchmark_with_restart__ci__():
""" This tests that restarting does not affect time accuracy. """
sim = phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation()
sim.end_time = 0.01
sim.run(verify = False)
sim2 = phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation()
sim2.read_checkpoint(sim.latest_checkpoint_filepath)
assert(sim.state.time == sim2.old_state.time)
assert(all(sim.state.solution.leaf_node().vector() == sim2.old_state.solution.leaf_node().vector()))
sim2.prefix_output_dir_with_tempdir = True
sim2.run(verify = True)
|
Test restarting with Stefan problem instead of heat driven cavity. Removed some redundant tests
|
Test restarting with Stefan problem instead of heat driven cavity. Removed some redundant tests
|
Python
|
mit
|
geo-fluid-dynamics/phaseflow-fenics
|
python
|
## Code Before:
""" This module runs the benchmark test suite. """
from .context import phaseflow
def test_lid_driven_cavity_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.LidDrivenCavityBenchmarkSimulation())
def test_lid_driven_cavity_benchmark_with_solid_subdomain__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.LDCBenchmarkSimulationWithSolidSubdomain())
def test_heat_driven_cavity_benchmark_with_restart__ci__():
sim = phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation()
sim.prefix_output_dir_with_tempdir = True
sim.end_time = 2.*sim.timestep_size
sim.run(verify = False)
sim2 = phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation()
sim2.read_checkpoint(sim.latest_checkpoint_filepath)
assert(sim.state.time == sim2.old_state.time)
assert(all(sim.state.solution.leaf_node().vector() == sim2.old_state.solution.leaf_node().vector()))
sim.prefix_output_dir_with_tempdir = True
sim2.run(verify = True)
def test_stefan_problem_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation())
## Instruction:
Test restarting with Stefan problem instead of heat driven cavity. Removed some redundant tests
## Code After:
""" This module runs the benchmark test suite. """
from .context import phaseflow
def test_lid_driven_cavity_benchmark_with_solid_subdomain__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.LDCBenchmarkSimulationWithSolidSubdomain())
def test_heat_driven_cavity_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation())
def test_stefan_problem_benchmark_with_restart__ci__():
""" This tests that restarting does not affect time accuracy. """
sim = phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation()
sim.end_time = 0.01
sim.run(verify = False)
sim2 = phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation()
sim2.read_checkpoint(sim.latest_checkpoint_filepath)
assert(sim.state.time == sim2.old_state.time)
assert(all(sim.state.solution.leaf_node().vector() == sim2.old_state.solution.leaf_node().vector()))
sim2.prefix_output_dir_with_tempdir = True
sim2.run(verify = True)
|
...
""" This module runs the benchmark test suite. """
from .context import phaseflow
def test_lid_driven_cavity_benchmark_with_solid_subdomain__ci__():
...
phaseflow.octadecane_benchmarks.LDCBenchmarkSimulationWithSolidSubdomain())
def test_heat_driven_cavity_benchmark__ci__():
phaseflow.helpers.run_simulation_with_temporary_output(
phaseflow.octadecane_benchmarks.HeatDrivenCavityBenchmarkSimulation())
def test_stefan_problem_benchmark_with_restart__ci__():
""" This tests that restarting does not affect time accuracy. """
sim = phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation()
sim.end_time = 0.01
sim.run(verify = False)
sim2 = phaseflow.octadecane_benchmarks.StefanProblemBenchmarkSimulation()
sim2.read_checkpoint(sim.latest_checkpoint_filepath)
assert(sim.state.time == sim2.old_state.time)
assert(all(sim.state.solution.leaf_node().vector() == sim2.old_state.solution.leaf_node().vector()))
sim2.prefix_output_dir_with_tempdir = True
sim2.run(verify = True)
...
|
9db490d5d175f108231cc87afd87a593359837e8
|
app/views.py
|
app/views.py
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
    """Render the front page with the first 12 auctions.

    The MySQL connection is opened per request: a single module-level
    connection goes stale after MySQL's idle timeout (8 hours by default)
    and every request after that fails.
    """
    # SECURITY: credentials are hard-coded here; move them to configuration.
    con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
    with con:
        cur = con.cursor(mdb.cursors.DictCursor)
        cur.execute("SELECT * FROM Auctions LIMIT 12")
        rows = cur.fetchall()
        for key in rows:
            # Derive the thumbnail filename from the image path.
            key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
        return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
Fix the disconnect after 8 hours bug.
|
Fix the disconnect after 8 hours bug.
|
Python
|
mit
|
jbwhit/hammer-pricer,jbwhit/hammer-pricer
|
python
|
## Code Before:
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
@app.route('/')
@app.route('/index')
def index():
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
## Instruction:
Fix the disconnect after 8 hours bug.
## Code After:
import os
from flask import render_template, jsonify, request
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
rows = cur.fetchall()
for key in rows:
key['thumb'] = key['image'].split(".")[2] + "-thumb.jpg"
return render_template('destination2.html', auctions=rows)
@app.route('/slides')
def cities_page():
return render_template('slides_wide.html')
@app.route("/slides_wide", methods=["GET"])
def slides_wide():
title="HammerPricer Slides"
return render_template("slides_wide.html", title=title)
|
# ... existing code ...
from app import app
import pymysql as mdb
@app.route('/')
@app.route('/index')
def index():
con = mdb.connect('localhost', "root", "ozfgefgvrwix", 'test1')
with con:
cur = con.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * FROM Auctions LIMIT 12")
# ... rest of the code ...
|
e76ca364ab979e309d34ff458ef2629145a52ce2
|
magnum/db/sqlalchemy/alembic/versions/a1136d335540_add_docker_storage_driver_column.py
|
magnum/db/sqlalchemy/alembic/versions/a1136d335540_add_docker_storage_driver_column.py
|
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'
from alembic import op
import sqlalchemy as sa
docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
name='docker_storage_driver')
def upgrade():
    """Add the nullable ``docker_storage_driver`` enum column to ``baymodel``.

    The enum type must be created explicitly first: on PostgreSQL,
    ``add_column`` does not create it implicitly and fails with
    ``type "docker_storage_driver" does not exist``. ``checkfirst`` keeps
    this a no-op on databases where the type already exists.
    """
    docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
                                        nullable=True))
|
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'
from alembic import op
import sqlalchemy as sa
docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
name='docker_storage_driver')
def upgrade():
docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
op.add_column('baymodel', sa.Column('docker_storage_driver',
docker_storage_driver_enum,
nullable=True))
|
Fix for enum type docker_storage_driver
|
Fix for enum type docker_storage_driver
Create enum type "docker_storage_driver" for migration
This is fixing
oslo_db.exception.DBError: (psycopg2.ProgrammingError) type
"docker_storage_driver" does not exist
Closes-Bug: #1609776
Change-Id: I92d427e90bd73b4114d8688d3761cabac450fc9d
|
Python
|
apache-2.0
|
openstack/magnum,openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum
|
python
|
## Code Before:
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'
from alembic import op
import sqlalchemy as sa
docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
name='docker_storage_driver')
def upgrade():
op.add_column('baymodel', sa.Column('docker_storage_driver',
docker_storage_driver_enum,
nullable=True))
## Instruction:
Fix for enum type docker_storage_driver
Create enum type "docker_storage_driver" for migration
This is fixing
oslo_db.exception.DBError: (psycopg2.ProgrammingError) type
"docker_storage_driver" does not exist
Closes-Bug: #1609776
Change-Id: I92d427e90bd73b4114d8688d3761cabac450fc9d
## Code After:
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'
from alembic import op
import sqlalchemy as sa
docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
name='docker_storage_driver')
def upgrade():
docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
op.add_column('baymodel', sa.Column('docker_storage_driver',
docker_storage_driver_enum,
nullable=True))
|
# ... existing code ...
def upgrade():
docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
op.add_column('baymodel', sa.Column('docker_storage_driver',
docker_storage_driver_enum,
nullable=True))
# ... rest of the code ...
|
f12dcc72d9b48b5f962dcaf33636de7ddff7f075
|
proxy/src/test/java/net/md_5/bungee/ThrottleTest.java
|
proxy/src/test/java/net/md_5/bungee/ThrottleTest.java
|
package net.md_5.bungee;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.junit.Assert;
import org.junit.Test;
/**
 * Verifies that {@code ConnectionThrottle} throttles repeated connections
 * from one address, and releases on unthrottle or after the throttle window.
 */
public class ThrottleTest
{

    @Test
    public void testThrottle() throws InterruptedException, UnknownHostException
    {
        ConnectionThrottle throttle = new ConnectionThrottle( 5 );
        // getLocalHost() throws UnknownHostException on machines whose
        // hostname does not resolve (common on OS X); fall back to the
        // loopback address so the test still runs there.
        InetAddress address;
        try
        {
            address = InetAddress.getLocalHost();
        } catch ( UnknownHostException ex )
        {
            address = InetAddress.getByName( null );
        }
        Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
        Assert.assertTrue( "Address should be throttled", throttle.throttle( address ) );
        throttle.unthrottle( address );
        Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
        // Sleep past the 5 ms throttle window so the entry expires.
        Thread.sleep( 15 );
        Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
    }
}
|
package net.md_5.bungee;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.junit.Assert;
import org.junit.Test;
public class ThrottleTest
{
@Test
public void testThrottle() throws InterruptedException, UnknownHostException
{
ConnectionThrottle throttle = new ConnectionThrottle( 5 );
InetAddress address;
try {
address = InetAddress.getLocalHost();
} catch (UnknownHostException ex) {
address = InetAddress.getByName( null );
}
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Assert.assertTrue( "Address should be throttled", throttle.throttle( address ) );
throttle.unthrottle( address );
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Thread.sleep( 15 );
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
}
}
|
Fix compile error when compiling on OS X
|
Fix compile error when compiling on OS X
|
Java
|
bsd-3-clause
|
GamesConMCGames/Bungeecord,ewized/BungeeCord,PrisonPvP/BungeeCord,LinEvil/BungeeCord,GingerGeek/BungeeCord,ewized/BungeeCord,starlis/BungeeCord,mariolars/BungeeCord,ConnorLinfoot/BungeeCord,LolnetModPack/BungeeCord,TCPR/BungeeCord,ConnorLinfoot/BungeeCord,TCPR/BungeeCord,dentmaged/BungeeCord,btilm305/BungeeCord,mariolars/BungeeCord,xxyy/BungeeCord,btilm305/BungeeCord,GingerGeek/BungeeCord,Yive/BungeeCord,LinEvil/BungeeCord,mariolars/BungeeCord,LetsPlayOnline/BungeeJumper,LolnetModPack/BungeeCord,LinEvil/BungeeCord,TCPR/BungeeCord,GamesConMCGames/Bungeecord,Xetius/BungeeCord,dentmaged/BungeeCord,starlis/BungeeCord,GamesConMCGames/Bungeecord,BlueAnanas/BungeeCord,LetsPlayOnline/BungeeJumper,XMeowTW/BungeeCord,xxyy/BungeeCord,btilm305/BungeeCord,XMeowTW/BungeeCord,Yive/BungeeCord,PrisonPvP/BungeeCord,ewized/BungeeCord,dentmaged/BungeeCord,ConnorLinfoot/BungeeCord,LolnetModPack/BungeeCord,BlueAnanas/BungeeCord,XMeowTW/BungeeCord,PrisonPvP/BungeeCord,Xetius/BungeeCord,GingerGeek/BungeeCord,xxyy/BungeeCord,BlueAnanas/BungeeCord,Yive/BungeeCord,LetsPlayOnline/BungeeJumper,starlis/BungeeCord,Xetius/BungeeCord
|
java
|
## Code Before:
package net.md_5.bungee;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.junit.Assert;
import org.junit.Test;
public class ThrottleTest
{
@Test
public void testThrottle() throws InterruptedException, UnknownHostException
{
ConnectionThrottle throttle = new ConnectionThrottle( 5 );
InetAddress address = InetAddress.getLocalHost();
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Assert.assertTrue( "Address should be throttled", throttle.throttle( address ) );
throttle.unthrottle( address );
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Thread.sleep( 15 );
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
}
}
## Instruction:
Fix compile error when compiling on OS X
## Code After:
package net.md_5.bungee;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.junit.Assert;
import org.junit.Test;
public class ThrottleTest
{
@Test
public void testThrottle() throws InterruptedException, UnknownHostException
{
ConnectionThrottle throttle = new ConnectionThrottle( 5 );
InetAddress address;
try {
address = InetAddress.getLocalHost();
} catch (UnknownHostException ex) {
address = InetAddress.getByName( null );
}
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Assert.assertTrue( "Address should be throttled", throttle.throttle( address ) );
throttle.unthrottle( address );
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Thread.sleep( 15 );
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
}
}
|
...
public void testThrottle() throws InterruptedException, UnknownHostException
{
ConnectionThrottle throttle = new ConnectionThrottle( 5 );
InetAddress address;
try {
address = InetAddress.getLocalHost();
} catch (UnknownHostException ex) {
address = InetAddress.getByName( null );
}
Assert.assertFalse( "Address should not be throttled", throttle.throttle( address ) );
Assert.assertTrue( "Address should be throttled", throttle.throttle( address ) );
...
|
5f522e96c97ec9fb26c18ea34dc2267dadbfe6a7
|
src/com/obidea/semantika/mapping/parser/r2rml/R2RmlTemplate.java
|
src/com/obidea/semantika/mapping/parser/r2rml/R2RmlTemplate.java
|
package com.obidea.semantika.mapping.parser.r2rml;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.obidea.semantika.util.StringUtils;
/**
 * Parses an R2RML template string, collecting the column names that appear
 * inside curly braces and replacing each with a 1-based positional index.
 */
public class R2RmlTemplate
{
   private static final Pattern columnInCurlyBraces = Pattern.compile("\\{([^\\}]+)\\}");

   private int mIndex = 1;
   private String mTemplateString;
   private List<String> mColumnNames = new ArrayList<String>();

   public R2RmlTemplate(String templateString)
   {
      // Must be assigned before process() runs, which dereferences it;
      // previously this field was never set, causing a NullPointerException.
      mTemplateString = templateString;
      process(templateString);
   }

   /**
    * @return the template string with column names replaced by their index.
    */
   public String getTemplateString()
   {
      return mTemplateString;
   }

   /**
    * @return the column names in the order they appear in the template.
    */
   public List<String> getColumnNames()
   {
      return mColumnNames;
   }

   private void process(String templateString)
   {
      Matcher m = columnInCurlyBraces.matcher(templateString);
      while (m.find()) {
         // The pattern declares a single capture group, so group(1) is the
         // column name; group(2) would throw IndexOutOfBoundsException.
         String arg = m.group(1);
         if (!StringUtils.isEmpty(arg)) {
            // Strings are immutable: keep the result of replace(), otherwise
            // the substitution is silently discarded.
            mTemplateString = mTemplateString.replace(arg, mIndex + "");
            mColumnNames.add(arg);
            mIndex++;
         }
      }
   }
}
|
package com.obidea.semantika.mapping.parser.r2rml;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.obidea.semantika.util.StringUtils;
public class R2RmlTemplate
{
private static final Pattern columnInCurlyBraces = Pattern.compile("\\{([^\\}]+)\\}");
private int mIndex = 1;
private String mTemplateString;
private List<String> mColumnNames = new ArrayList<String>();
public R2RmlTemplate(String templateString)
{
mTemplateString = templateString;
process(templateString);
}
public String getTemplateString()
{
return mTemplateString;
}
public List<String> getColumnNames()
{
return mColumnNames;
}
private void process(String templateString)
{
Matcher m = columnInCurlyBraces.matcher(templateString);
while (m.find()) {
String arg = m.group(2);
if (!StringUtils.isEmpty(arg)) {
mTemplateString.replace(arg, mIndex+"");
mColumnNames.add(arg);
mIndex++;
}
}
}
}
|
Fix null pointer exception because of the missing value assignment.
|
Fix null pointer exception because of the missing value assignment.
|
Java
|
apache-2.0
|
obidea/semantika
|
java
|
## Code Before:
package com.obidea.semantika.mapping.parser.r2rml;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.obidea.semantika.util.StringUtils;
public class R2RmlTemplate
{
private static final Pattern columnInCurlyBraces = Pattern.compile("\\{([^\\}]+)\\}");
private int mIndex = 1;
private String mTemplateString;
private List<String> mColumnNames = new ArrayList<String>();
public R2RmlTemplate(String templateString)
{
process(templateString);
}
public String getTemplateString()
{
return mTemplateString;
}
public List<String> getColumnNames()
{
return mColumnNames;
}
private void process(String templateString)
{
Matcher m = columnInCurlyBraces.matcher(templateString);
while (m.find()) {
String arg = m.group(2);
if (!StringUtils.isEmpty(arg)) {
mTemplateString.replace(arg, mIndex+"");
mColumnNames.add(arg);
mIndex++;
}
}
}
}
## Instruction:
Fix null pointer exception because of the missing value assignment.
## Code After:
package com.obidea.semantika.mapping.parser.r2rml;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.obidea.semantika.util.StringUtils;
public class R2RmlTemplate
{
private static final Pattern columnInCurlyBraces = Pattern.compile("\\{([^\\}]+)\\}");
private int mIndex = 1;
private String mTemplateString;
private List<String> mColumnNames = new ArrayList<String>();
public R2RmlTemplate(String templateString)
{
mTemplateString = templateString;
process(templateString);
}
public String getTemplateString()
{
return mTemplateString;
}
public List<String> getColumnNames()
{
return mColumnNames;
}
private void process(String templateString)
{
Matcher m = columnInCurlyBraces.matcher(templateString);
while (m.find()) {
String arg = m.group(2);
if (!StringUtils.isEmpty(arg)) {
mTemplateString.replace(arg, mIndex+"");
mColumnNames.add(arg);
mIndex++;
}
}
}
}
|
// ... existing code ...
public R2RmlTemplate(String templateString)
{
mTemplateString = templateString;
process(templateString);
}
// ... rest of the code ...
|
8fce8e72f5ff40e51605f3b14bcde5006f4eaa71
|
molly/utils/i18n.py
|
molly/utils/i18n.py
|
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
try:
from django.utils.translation import override
except ImportError:
from django.utils.translation import activate, deactivate
class override(object):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
self.old_language = get_language()
def __enter__(self):
activate(self.language)
def __exit__(self, exc_type, exc_value, traceback):
if self.deactivate:
deactivate()
else:
activate(self.old_language)
def name_in_language(obj, field):
    """Return ``field`` from the name of ``obj`` in the active language.

    Falls back to ``settings.LANGUAGE_CODE``, and then to its bare language
    part (e.g. ``en`` for ``en-gb``). Raises the model's ``DoesNotExist``
    if no suitable name record is found.
    """
    # Each concrete model gets its own DoesNotExist subclass derived from
    # ObjectDoesNotExist, not from Model.DoesNotExist -- so catching
    # Model.DoesNotExist never matches. Catch the common base instead.
    from django.core.exceptions import ObjectDoesNotExist
    try:
        return getattr(obj.names.get(language_code=get_language()), field)
    except ObjectDoesNotExist:
        try:
            return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
        except ObjectDoesNotExist:
            if '-' in settings.LANGUAGE_CODE:
                return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
            else:
                raise
|
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
try:
from django.utils.translation import override
except ImportError:
from django.utils.translation import activate, deactivate
class override(object):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
self.old_language = get_language()
def __enter__(self):
activate(self.language)
def __exit__(self, exc_type, exc_value, traceback):
if self.deactivate:
deactivate()
else:
activate(self.old_language)
def name_in_language(obj, field):
try:
return getattr(obj.names.get(language_code=get_language()), field)
except ObjectDoesNotExist:
try:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
except ObjectDoesNotExist:
if '-' in settings.LANGUAGE_CODE:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
else:
raise
|
Fix bug in exception handling
|
Fix bug in exception handling
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
python
|
## Code Before:
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
try:
from django.utils.translation import override
except ImportError:
from django.utils.translation import activate, deactivate
class override(object):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
self.old_language = get_language()
def __enter__(self):
activate(self.language)
def __exit__(self, exc_type, exc_value, traceback):
if self.deactivate:
deactivate()
else:
activate(self.old_language)
def name_in_language(obj, field):
try:
return getattr(obj.names.get(language_code=get_language()), field)
except Model.DoesNotExist:
try:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
except Model.DoesNotExist:
if '-' in settings.LANGUAGE_CODE:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
else:
raise
## Instruction:
Fix bug in exception handling
## Code After:
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
try:
from django.utils.translation import override
except ImportError:
from django.utils.translation import activate, deactivate
class override(object):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
self.old_language = get_language()
def __enter__(self):
activate(self.language)
def __exit__(self, exc_type, exc_value, traceback):
if self.deactivate:
deactivate()
else:
activate(self.old_language)
def name_in_language(obj, field):
try:
return getattr(obj.names.get(language_code=get_language()), field)
except ObjectDoesNotExist:
try:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
except ObjectDoesNotExist:
if '-' in settings.LANGUAGE_CODE:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
else:
raise
|
...
from django.utils.translation import get_language
from django.db.models import Model
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
try:
from django.utils.translation import override
...
def name_in_language(obj, field):
try:
return getattr(obj.names.get(language_code=get_language()), field)
except ObjectDoesNotExist:
try:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE), field)
except ObjectDoesNotExist:
if '-' in settings.LANGUAGE_CODE:
return getattr(obj.names.get(language_code=settings.LANGUAGE_CODE.split('-')[0]), field)
else:
...
|
6f6c7b1fd420d7faa2c5dd7db1c0bcc928065878
|
client/src/com/vaadin/client/connectors/NoSelectionModelConnector.java
|
client/src/com/vaadin/client/connectors/NoSelectionModelConnector.java
|
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.connectors;
import com.vaadin.client.ServerConnector;
import com.vaadin.client.widget.grid.selection.SelectionModel;
import com.vaadin.client.widget.grid.selection.SelectionModelNone;
import com.vaadin.shared.ui.Connect;
import com.vaadin.ui.Grid.NoSelectionModel;
import elemental.json.JsonObject;
/**
 * Connector for server-side {@link NoSelectionModel}. Installs a
 * {@code SelectionModelNone} on the client-side grid so no rows are
 * selectable.
 *
 * @since 7.6
 * @author Vaadin Ltd
 */
@Connect(NoSelectionModel.class)
public class NoSelectionModelConnector extends
AbstractSelectionModelConnector<SelectionModel<JsonObject>> {
@Override
protected void extend(ServerConnector target) {
// Replace the grid's current selection model with the no-op one.
getGrid().setSelectionModel(createSelectionModel());
}
@Override
protected SelectionModel<JsonObject> createSelectionModel() {
return new SelectionModelNone<JsonObject>();
}
}
|
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.connectors;
import com.vaadin.client.ServerConnector;
import com.vaadin.client.widget.grid.selection.SelectionModel;
import com.vaadin.client.widget.grid.selection.SelectionModelNone;
import com.vaadin.shared.ui.Connect;
import com.vaadin.ui.Grid.NoSelectionModel;
import elemental.json.JsonObject;
/**
* Connector for server-side {@link NoSelectionModel}.
*
* @since 7.6
* @author Vaadin Ltd
*/
@Connect(NoSelectionModel.class)
public class NoSelectionModelConnector extends
AbstractSelectionModelConnector<SelectionModel<JsonObject>> {
@Override
protected void extend(ServerConnector target) {
getGrid().setSelectionModel(createSelectionModel());
}
@Override
protected SelectionModel<JsonObject> createSelectionModel() {
return new SelectionModelNone<JsonObject>();
}
}
|
Add missing @since 7.6 tags
|
Add missing @since 7.6 tags
Change-Id: Iffa8655403615d1f7345709c865dd14c6fa861b2
|
Java
|
apache-2.0
|
Legioth/vaadin,asashour/framework,kironapublic/vaadin,asashour/framework,Peppe/vaadin,jdahlstrom/vaadin.react,oalles/vaadin,jdahlstrom/vaadin.react,mstahv/framework,peterl1084/framework,Darsstar/framework,Legioth/vaadin,Darsstar/framework,peterl1084/framework,Peppe/vaadin,shahrzadmn/vaadin,oalles/vaadin,kironapublic/vaadin,mstahv/framework,Darsstar/framework,Legioth/vaadin,kironapublic/vaadin,oalles/vaadin,Peppe/vaadin,shahrzadmn/vaadin,Peppe/vaadin,Legioth/vaadin,kironapublic/vaadin,kironapublic/vaadin,oalles/vaadin,jdahlstrom/vaadin.react,asashour/framework,jdahlstrom/vaadin.react,mstahv/framework,shahrzadmn/vaadin,oalles/vaadin,mstahv/framework,asashour/framework,shahrzadmn/vaadin,peterl1084/framework,peterl1084/framework,jdahlstrom/vaadin.react,asashour/framework,peterl1084/framework,Darsstar/framework,Legioth/vaadin,Darsstar/framework,shahrzadmn/vaadin,mstahv/framework,Peppe/vaadin
|
java
|
## Code Before:
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.connectors;
import com.vaadin.client.ServerConnector;
import com.vaadin.client.widget.grid.selection.SelectionModel;
import com.vaadin.client.widget.grid.selection.SelectionModelNone;
import com.vaadin.shared.ui.Connect;
import com.vaadin.ui.Grid.NoSelectionModel;
import elemental.json.JsonObject;
/**
* Connector for server-side {@link NoSelectionModel}.
*/
@Connect(NoSelectionModel.class)
public class NoSelectionModelConnector extends
AbstractSelectionModelConnector<SelectionModel<JsonObject>> {
@Override
protected void extend(ServerConnector target) {
getGrid().setSelectionModel(createSelectionModel());
}
@Override
protected SelectionModel<JsonObject> createSelectionModel() {
return new SelectionModelNone<JsonObject>();
}
}
## Instruction:
Add missing @since 7.6 tags
Change-Id: Iffa8655403615d1f7345709c865dd14c6fa861b2
## Code After:
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.connectors;
import com.vaadin.client.ServerConnector;
import com.vaadin.client.widget.grid.selection.SelectionModel;
import com.vaadin.client.widget.grid.selection.SelectionModelNone;
import com.vaadin.shared.ui.Connect;
import com.vaadin.ui.Grid.NoSelectionModel;
import elemental.json.JsonObject;
/**
* Connector for server-side {@link NoSelectionModel}.
*
* @since 7.6
* @author Vaadin Ltd
*/
@Connect(NoSelectionModel.class)
public class NoSelectionModelConnector extends
AbstractSelectionModelConnector<SelectionModel<JsonObject>> {
@Override
protected void extend(ServerConnector target) {
getGrid().setSelectionModel(createSelectionModel());
}
@Override
protected SelectionModel<JsonObject> createSelectionModel() {
return new SelectionModelNone<JsonObject>();
}
}
|
// ... existing code ...
/**
* Connector for server-side {@link NoSelectionModel}.
*
* @since 7.6
* @author Vaadin Ltd
*/
@Connect(NoSelectionModel.class)
public class NoSelectionModelConnector extends
// ... rest of the code ...
|
ed51fe4a5dcdc3ed34b3af9fd7bf03b7cf5da30a
|
daemon.c
|
daemon.c
|
FILE* logfile;
void init() {
// Change the umask so we can write to log files
umask(0);
// We should initialize and open log files here
logfile = fopen("./creddit.log", "a");
if (logfile == NULL) {
printf("Failed to create file with error: %d\n", errno);
exit(errno);
}
}
void run() {
}
int main() {
pid_t pid = fork();
// Check to see if there was an error with the fork
if (pid < 0) exit(1);
// If we are the child, then we do the heavy lifting
if (pid == 0) {
init();
run();
}
// Elsewise, we are the parent and we want to exit gracefully
exit(0);
}
|
FILE* logfile;
void init() {
// Change the umask so we can write to log files
umask(0);
// We should initialize and open log files here
logfile = fopen("./creddit.log", "a");
if (logfile == NULL) {
printf("Failed to create file with error: %d\n", errno);
exit(errno);
}
// Now we need to get a new unique SID so we aren't an orphan
// Remember our parent has died.
if (setsid() < 0) {
fprintf(logfile, "Could not set our SID: %d\n", errno);
exit(errno);
}
}
void run() {
}
int main() {
pid_t pid = fork();
// Check to see if there was an error with the fork
if (pid < 0) exit(1);
// If we are the child, then we do the heavy lifting
if (pid == 0) {
init();
run();
}
// Elsewise, we are the parent and we want to exit gracefully
exit(0);
}
|
Modify session id and first test of logger
|
Modify session id and first test of logger
|
C
|
mit
|
nguyenmp/creddit
|
c
|
## Code Before:
FILE* logfile;
void init() {
// Change the umask so we can write to log files
umask(0);
// We should initialize and open log files here
logfile = fopen("./creddit.log", "a");
if (logfile == NULL) {
printf("Failed to create file with error: %d\n", errno);
exit(errno);
}
}
void run() {
}
int main() {
pid_t pid = fork();
// Check to see if there was an error with the fork
if (pid < 0) exit(1);
// If we are the child, then we do the heavy lifting
if (pid == 0) {
init();
run();
}
// Elsewise, we are the parent and we want to exit gracefully
exit(0);
}
## Instruction:
Modify session id and first test of logger
## Code After:
FILE* logfile;
void init() {
// Change the umask so we can write to log files
umask(0);
// We should initialize and open log files here
logfile = fopen("./creddit.log", "a");
if (logfile == NULL) {
printf("Failed to create file with error: %d\n", errno);
exit(errno);
}
// Now we need to get a new unique SID so we aren't an orphan
// Remember our parent has died.
if (setsid() < 0) {
fprintf(logfile, "Could not set our SID: %d\n", errno);
exit(errno);
}
}
void run() {
}
int main() {
pid_t pid = fork();
// Check to see if there was an error with the fork
if (pid < 0) exit(1);
// If we are the child, then we do the heavy lifting
if (pid == 0) {
init();
run();
}
// Elsewise, we are the parent and we want to exit gracefully
exit(0);
}
|
...
logfile = fopen("./creddit.log", "a");
if (logfile == NULL) {
printf("Failed to create file with error: %d\n", errno);
exit(errno);
}
// Now we need to get a new unique SID so we aren't an orphan
// Remember our parent has died.
if (setsid() < 0) {
fprintf(logfile, "Could not set our SID: %d\n", errno);
exit(errno);
}
}
...
|
68625abd9bce7411aa27375a2668d960ad2021f4
|
cell/results.py
|
cell/results.py
|
"""cell.result"""
from __future__ import absolute_import
from __future__ import with_statement
from kombu.pools import producers
from .exceptions import CellError, NoReplyError
__all__ = ['AsyncResult']
class AsyncResult(object):
Error = CellError
NoReplyError = NoReplyError
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
def _first(self, replies):
if replies is not None:
replies = list(replies)
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
def gather(self, propagate=True, **kwargs):
connection = self.actor.connection
gather = self._gather
with producers[connection].acquire(block=True) as producer:
for r in gather(producer.connection, producer.channel, self.ticket,
propagate=propagate, **kwargs):
yield r
def _gather(self, *args, **kwargs):
propagate = kwargs.pop('propagate', True)
return (self.to_python(reply, propagate=propagate)
for reply in self.actor._collect_replies(*args, **kwargs))
def to_python(self, reply, propagate=True):
try:
return reply['ok']
except KeyError:
error = self.Error(*reply.get('nok') or ())
if propagate:
raise error
return error
|
"""cell.result"""
from __future__ import absolute_import
from __future__ import with_statement
from kombu.pools import producers
from .exceptions import CellError, NoReplyError
__all__ = ['AsyncResult']
class AsyncResult(object):
Error = CellError
NoReplyError = NoReplyError
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
self._result = None
def _first(self, replies):
if replies is not None:
replies = list(replies)
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
@property
def result(self):
if not self._result:
self._result = self.get()
return self.result
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
def gather(self, propagate=True, **kwargs):
connection = self.actor.connection
gather = self._gather
with producers[connection].acquire(block=True) as producer:
for r in gather(producer.connection, producer.channel, self.ticket,
propagate=propagate, **kwargs):
yield r
def _gather(self, *args, **kwargs):
propagate = kwargs.pop('propagate', True)
return (self.to_python(reply, propagate=propagate)
for reply in self.actor._collect_replies(*args, **kwargs))
def to_python(self, reply, propagate=True):
try:
return reply['ok']
except KeyError:
error = self.Error(*reply.get('nok') or ())
if propagate:
raise error
return error
|
Add result property to AsyncResult (it blocks if the result has not been previously retrieved, or return the result otherwise)
|
Add result property to AsyncResult
(it blocks if the result has not been previously retrieved, or return the
result otherwise)
|
Python
|
bsd-3-clause
|
celery/cell,celery/cell
|
python
|
## Code Before:
"""cell.result"""
from __future__ import absolute_import
from __future__ import with_statement
from kombu.pools import producers
from .exceptions import CellError, NoReplyError
__all__ = ['AsyncResult']
class AsyncResult(object):
Error = CellError
NoReplyError = NoReplyError
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
def _first(self, replies):
if replies is not None:
replies = list(replies)
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
def gather(self, propagate=True, **kwargs):
connection = self.actor.connection
gather = self._gather
with producers[connection].acquire(block=True) as producer:
for r in gather(producer.connection, producer.channel, self.ticket,
propagate=propagate, **kwargs):
yield r
def _gather(self, *args, **kwargs):
propagate = kwargs.pop('propagate', True)
return (self.to_python(reply, propagate=propagate)
for reply in self.actor._collect_replies(*args, **kwargs))
def to_python(self, reply, propagate=True):
try:
return reply['ok']
except KeyError:
error = self.Error(*reply.get('nok') or ())
if propagate:
raise error
return error
## Instruction:
Add result property to AsyncResult
(it blocks if the result has not been previously retrieved, or return the
result otherwise)
## Code After:
"""cell.result"""
from __future__ import absolute_import
from __future__ import with_statement
from kombu.pools import producers
from .exceptions import CellError, NoReplyError
__all__ = ['AsyncResult']
class AsyncResult(object):
Error = CellError
NoReplyError = NoReplyError
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
self._result = None
def _first(self, replies):
if replies is not None:
replies = list(replies)
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
@property
def result(self):
if not self._result:
self._result = self.get()
return self.result
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
def gather(self, propagate=True, **kwargs):
connection = self.actor.connection
gather = self._gather
with producers[connection].acquire(block=True) as producer:
for r in gather(producer.connection, producer.channel, self.ticket,
propagate=propagate, **kwargs):
yield r
def _gather(self, *args, **kwargs):
propagate = kwargs.pop('propagate', True)
return (self.to_python(reply, propagate=propagate)
for reply in self.actor._collect_replies(*args, **kwargs))
def to_python(self, reply, propagate=True):
try:
return reply['ok']
except KeyError:
error = self.Error(*reply.get('nok') or ())
if propagate:
raise error
return error
|
// ... existing code ...
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
self._result = None
def _first(self, replies):
if replies is not None:
// ... modified code ...
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
@property
def result(self):
if not self._result:
self._result = self.get()
return self.result
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
// ... rest of the code ...
|
0f9d3b0ed9efc72b8b3fd4d466caa4517691546c
|
strategies/alexStrategies.py
|
strategies/alexStrategies.py
|
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
|
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
class CardCounter:
"""This strategy folds based on card counting expectation values."""
def __init__(self, scared=0.23):
from collections import Counter
self.Counter = Counter
self.scared = scared
def play(self, info):
c = self.Counter(info.deck)
if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
|
Add a simple card counting strategy
|
Add a simple card counting strategy
|
Python
|
mit
|
AlexMooney/pairsTournament
|
python
|
## Code Before:
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
## Instruction:
Add a simple card counting strategy
## Code After:
class FixFoldStrategy:
"""This strategy folds every time there is a small card available."""
def __init__(self, N=3):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1] > self.N:
return 'Hit me'
else:
return 'fold'
class RatioFoldStrategy:
"""This strategy folds more readily as their stack grows worse"""
def __init__(self, N=4):
self.N = N
def play(self, info):
if info.bestFold(self.player)[1]*self.N > sum([s*s for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
class CardCounter:
"""This strategy folds based on card counting expectation values."""
def __init__(self, scared=0.23):
from collections import Counter
self.Counter = Counter
self.scared = scared
def play(self, info):
c = self.Counter(info.deck)
if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
|
# ... existing code ...
return 'Hit me'
else:
return 'fold'
class CardCounter:
"""This strategy folds based on card counting expectation values."""
def __init__(self, scared=0.23):
from collections import Counter
self.Counter = Counter
self.scared = scared
def play(self, info):
c = self.Counter(info.deck)
if info.bestFold(self.player)[1] > self.scared*sum([s*c[s] for s in c])/len(info.deck) + sum([s*c[s]/len(info.deck) for s in self.player.stack]):
return 'Hit me'
else:
return 'fold'
# ... rest of the code ...
|
3a308c37856bafd8ecbc64a2e425c8199dcf2e68
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='LinkFinder',
packages=find_packages(),
version='1.0',
description="A python script that finds endpoints in JavaScript files.",
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
install_requires=['argparse', 'jsbeautifier'],
)
|
from setuptools import setup, find_packages
setup(
name='LinkFinder',
packages=find_packages(),
version='1.0',
description="A python script that finds endpoints in JavaScript files.",
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
py_modules=['linkfinder'],
install_requires=['argparse', 'jsbeautifier'],
)
|
Allow use as vendor library
|
Allow use as vendor library
With this little change this tool can be used as a dependency
|
Python
|
mit
|
GerbenJavado/LinkFinder,GerbenJavado/LinkFinder
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name='LinkFinder',
packages=find_packages(),
version='1.0',
description="A python script that finds endpoints in JavaScript files.",
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
install_requires=['argparse', 'jsbeautifier'],
)
## Instruction:
Allow use as vendor library
With this little change this tool can be used as a dependency
## Code After:
from setuptools import setup, find_packages
setup(
name='LinkFinder',
packages=find_packages(),
version='1.0',
description="A python script that finds endpoints in JavaScript files.",
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
py_modules=['linkfinder'],
install_requires=['argparse', 'jsbeautifier'],
)
|
# ... existing code ...
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
py_modules=['linkfinder'],
install_requires=['argparse', 'jsbeautifier'],
)
# ... rest of the code ...
|
0d28e0d7985e0b3e56a57438de8a164c4e60adb9
|
okapi-core/src/main/java/org/folio/okapi/bean/Permission.java
|
okapi-core/src/main/java/org/folio/okapi/bean/Permission.java
|
/*
* Copyright (c) 2015-2017, Index Data
* All rights reserved.
* See the file LICENSE for details.
*/
package org.folio.okapi.bean;
import com.fasterxml.jackson.annotation.JsonInclude;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Permission {
private String permissionName;
private String displayName;
private String description;
private String[] subPermissions;
public Permission() {
}
public Permission(Permission other) {
this.permissionName = other.permissionName;
this.displayName = other.displayName;
this.description = other.description;
this.subPermissions = other.subPermissions;
}
public String getPermissionName() {
return permissionName;
}
public void setPermissionName(String permissionName) {
this.permissionName = permissionName;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String[] getSubPermissions() {
return subPermissions;
}
public void setSubPermissions(String[] subPermissions) {
this.subPermissions = subPermissions;
}
}
|
package org.folio.okapi.bean;
import com.fasterxml.jackson.annotation.JsonInclude;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Permission {
private String permissionName;
private String displayName;
private String description;
private String[] subPermissions;
public Permission() {
}
public Permission(Permission other) {
this.permissionName = other.permissionName;
this.displayName = other.displayName;
this.description = other.description;
this.subPermissions = other.subPermissions;
}
public String getPermissionName() {
return permissionName;
}
public void setPermissionName(String permissionName) {
this.permissionName = permissionName;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String[] getSubPermissions() {
return subPermissions;
}
public void setSubPermissions(String[] subPermissions) {
this.subPermissions = subPermissions;
}
}
|
Remove superfluous license headers (OKAPI-109)
|
Remove superfluous license headers (OKAPI-109)
|
Java
|
apache-2.0
|
folio-org/okapi,folio-org/okapi
|
java
|
## Code Before:
/*
* Copyright (c) 2015-2017, Index Data
* All rights reserved.
* See the file LICENSE for details.
*/
package org.folio.okapi.bean;
import com.fasterxml.jackson.annotation.JsonInclude;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Permission {
private String permissionName;
private String displayName;
private String description;
private String[] subPermissions;
public Permission() {
}
public Permission(Permission other) {
this.permissionName = other.permissionName;
this.displayName = other.displayName;
this.description = other.description;
this.subPermissions = other.subPermissions;
}
public String getPermissionName() {
return permissionName;
}
public void setPermissionName(String permissionName) {
this.permissionName = permissionName;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String[] getSubPermissions() {
return subPermissions;
}
public void setSubPermissions(String[] subPermissions) {
this.subPermissions = subPermissions;
}
}
## Instruction:
Remove superfluous license headers (OKAPI-109)
## Code After:
package org.folio.okapi.bean;
import com.fasterxml.jackson.annotation.JsonInclude;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Permission {
private String permissionName;
private String displayName;
private String description;
private String[] subPermissions;
public Permission() {
}
public Permission(Permission other) {
this.permissionName = other.permissionName;
this.displayName = other.displayName;
this.description = other.description;
this.subPermissions = other.subPermissions;
}
public String getPermissionName() {
return permissionName;
}
public void setPermissionName(String permissionName) {
this.permissionName = permissionName;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String[] getSubPermissions() {
return subPermissions;
}
public void setSubPermissions(String[] subPermissions) {
this.subPermissions = subPermissions;
}
}
|
# ... existing code ...
package org.folio.okapi.bean;
import com.fasterxml.jackson.annotation.JsonInclude;
# ... rest of the code ...
|
3190d0e00909383ef52ef56083a1c25396f597ae
|
OrbitQt/topdownwidget.h
|
OrbitQt/topdownwidget.h
|
// Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ORBIT_QT_TOP_DOWN_WIDGET_H_
#define ORBIT_QT_TOP_DOWN_WIDGET_H_
#include <QSortFilterProxyModel>
#include <memory>
#include "TopDownView.h"
#include "ui_topdownwidget.h"
class TopDownWidget : public QWidget {
Q_OBJECT
public:
explicit TopDownWidget(QWidget* parent = nullptr)
: QWidget{parent}, ui_{std::make_unique<Ui::TopDownWidget>()} {
ui_->setupUi(this);
connect(ui_->topDownTreeView,
SIGNAL(customContextMenuRequested(const QPoint&)), this,
SLOT(onCustomContextMenuRequested(const QPoint&)));
}
void SetTopDownView(std::unique_ptr<TopDownView> top_down_view);
private slots:
void onCustomContextMenuRequested(const QPoint& point);
private:
static const std::string kActionExpandAll;
static const std::string kActionCollapseAll;
std::unique_ptr<Ui::TopDownWidget> ui_;
};
#endif // ORBIT_QT_TOP_DOWN_WIDGET_H_
|
// Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ORBIT_QT_TOP_DOWN_WIDGET_H_
#define ORBIT_QT_TOP_DOWN_WIDGET_H_
#include <QSortFilterProxyModel>
#include <memory>
#include "TopDownView.h"
#include "ui_topdownwidget.h"
class TopDownWidget : public QWidget {
Q_OBJECT
public:
explicit TopDownWidget(QWidget* parent = nullptr)
: QWidget{parent}, ui_{std::make_unique<Ui::TopDownWidget>()} {
ui_->setupUi(this);
connect(ui_->topDownTreeView, &QTreeView::customContextMenuRequested, this,
&TopDownWidget::onCustomContextMenuRequested);
}
void SetTopDownView(std::unique_ptr<TopDownView> top_down_view);
private slots:
void onCustomContextMenuRequested(const QPoint& point);
private:
static const std::string kActionExpandAll;
static const std::string kActionCollapseAll;
std::unique_ptr<Ui::TopDownWidget> ui_;
};
#endif // ORBIT_QT_TOP_DOWN_WIDGET_H_
|
Use Qt 5 signal-slot syntax in TopDownWidget
|
Use Qt 5 signal-slot syntax in TopDownWidget
|
C
|
bsd-2-clause
|
google/orbit,google/orbit,google/orbit,google/orbit
|
c
|
## Code Before:
// Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ORBIT_QT_TOP_DOWN_WIDGET_H_
#define ORBIT_QT_TOP_DOWN_WIDGET_H_
#include <QSortFilterProxyModel>
#include <memory>
#include "TopDownView.h"
#include "ui_topdownwidget.h"
class TopDownWidget : public QWidget {
Q_OBJECT
public:
explicit TopDownWidget(QWidget* parent = nullptr)
: QWidget{parent}, ui_{std::make_unique<Ui::TopDownWidget>()} {
ui_->setupUi(this);
connect(ui_->topDownTreeView,
SIGNAL(customContextMenuRequested(const QPoint&)), this,
SLOT(onCustomContextMenuRequested(const QPoint&)));
}
void SetTopDownView(std::unique_ptr<TopDownView> top_down_view);
private slots:
void onCustomContextMenuRequested(const QPoint& point);
private:
static const std::string kActionExpandAll;
static const std::string kActionCollapseAll;
std::unique_ptr<Ui::TopDownWidget> ui_;
};
#endif // ORBIT_QT_TOP_DOWN_WIDGET_H_
## Instruction:
Use Qt 5 signal-slot syntax in TopDownWidget
## Code After:
// Copyright (c) 2020 The Orbit Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ORBIT_QT_TOP_DOWN_WIDGET_H_
#define ORBIT_QT_TOP_DOWN_WIDGET_H_
#include <QSortFilterProxyModel>
#include <memory>
#include "TopDownView.h"
#include "ui_topdownwidget.h"
class TopDownWidget : public QWidget {
Q_OBJECT
public:
explicit TopDownWidget(QWidget* parent = nullptr)
: QWidget{parent}, ui_{std::make_unique<Ui::TopDownWidget>()} {
ui_->setupUi(this);
connect(ui_->topDownTreeView, &QTreeView::customContextMenuRequested, this,
&TopDownWidget::onCustomContextMenuRequested);
}
void SetTopDownView(std::unique_ptr<TopDownView> top_down_view);
private slots:
void onCustomContextMenuRequested(const QPoint& point);
private:
static const std::string kActionExpandAll;
static const std::string kActionCollapseAll;
std::unique_ptr<Ui::TopDownWidget> ui_;
};
#endif // ORBIT_QT_TOP_DOWN_WIDGET_H_
|
// ... existing code ...
explicit TopDownWidget(QWidget* parent = nullptr)
: QWidget{parent}, ui_{std::make_unique<Ui::TopDownWidget>()} {
ui_->setupUi(this);
connect(ui_->topDownTreeView, &QTreeView::customContextMenuRequested, this,
&TopDownWidget::onCustomContextMenuRequested);
}
void SetTopDownView(std::unique_ptr<TopDownView> top_down_view);
// ... rest of the code ...
|
ed39e85be1bc88113cce771f1e106667402243eb
|
nano/src/test/java/com/airhacks/nano/ContextsTest.java
|
nano/src/test/java/com/airhacks/nano/ContextsTest.java
|
package com.airhacks.nano;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Test;
/**
*
* @author airhacks.com
*/
public class ContextsTest {
@Test
public void extractContext() {
Path path = Paths.get("./src/test/js/hello/duke.js");
String actual = Contexts.extractContext(path);
assertThat(actual, is("/src/test/js/hello/duke"));
}
}
|
package com.airhacks.nano;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Test;
/**
*
* @author airhacks.com
*/
public class ContextsTest {
@Test
public void extractContext() {
Path path = Paths.get("./src/test/js/hello/duke.js");
String actual = Contexts.extractContext(path);
Path actualPath = Paths.get(actual);
Path expectedPath = Paths.get("/src/test/js/hello/duke");
assertThat(actualPath, is(expectedPath));
}
}
|
Test compares path objects, not string -> should be OS-independent
|
Test compares path objects, not string -> should be OS-independent
|
Java
|
apache-2.0
|
AdamBien/nano,AdamBien/nano
|
java
|
## Code Before:
package com.airhacks.nano;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Test;
/**
*
* @author airhacks.com
*/
public class ContextsTest {
@Test
public void extractContext() {
Path path = Paths.get("./src/test/js/hello/duke.js");
String actual = Contexts.extractContext(path);
assertThat(actual, is("/src/test/js/hello/duke"));
}
}
## Instruction:
Test compares path objects, not string -> should be OS-independent
## Code After:
package com.airhacks.nano;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Test;
/**
*
* @author airhacks.com
*/
public class ContextsTest {
@Test
public void extractContext() {
Path path = Paths.get("./src/test/js/hello/duke.js");
String actual = Contexts.extractContext(path);
Path actualPath = Paths.get(actual);
Path expectedPath = Paths.get("/src/test/js/hello/duke");
assertThat(actualPath, is(expectedPath));
}
}
|
# ... existing code ...
public void extractContext() {
Path path = Paths.get("./src/test/js/hello/duke.js");
String actual = Contexts.extractContext(path);
Path actualPath = Paths.get(actual);
Path expectedPath = Paths.get("/src/test/js/hello/duke");
assertThat(actualPath, is(expectedPath));
}
}
# ... rest of the code ...
|
1964407097b15c92e9b3aa77dc3d6d94bb656757
|
turbustat/tests/test_dendro.py
|
turbustat/tests/test_dendro.py
|
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
    # Compute dendrogram statistics on the fiducial cube and compare the
    # number-of-features curve against the precomputed regression values.
    stats = Dendrogram_Stats(dataset1["cube"], min_deltas=min_deltas)
    stats.run(periodic_bounds=False)
    npt.assert_allclose(stats.numfeatures, computed_data["dendrogram_val"])
def test_DendroDistance():
    # Distance metric between the two fiducial datasets; both the histogram
    # distance and the feature-count distance must match precomputed values.
    tester_dist = DendroDistance(
        dataset1["cube"],
        dataset2["cube"],
        min_deltas=min_deltas,
        periodic_bounds=False,
    ).distance_metric()
    npt.assert_almost_equal(
        tester_dist.histogram_distance, computed_distances["dendrohist_distance"])
    npt.assert_almost_equal(
        tester_dist.num_distance, computed_distances["dendronum_distance"])
|
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
    """Regression test plus a save_results/load_results round-trip."""
    tester = Dendrogram_Stats(dataset1["cube"], min_deltas=min_deltas)
    tester.run(periodic_bounds=False)
    npt.assert_allclose(tester.numfeatures, computed_data["dendrogram_val"])

    # Round-trip the results through the pickle file written by
    # save_results() and re-check the regression values afterwards.
    output_name = "dendrogram_stats_output.pkl"
    tester.save_results(keep_data=False)
    try:
        tester.load_results(output_name)
    finally:
        # Always remove the scratch file so a failing load does not leave
        # it behind in the working directory for later test runs.
        if os.path.exists(output_name):
            os.remove(output_name)
    npt.assert_allclose(tester.numfeatures, computed_data["dendrogram_val"])
def test_DendroDistance():
    # Both distance components between the two fiducial cubes must agree
    # with the stored regression values.
    dist = DendroDistance(
        dataset1["cube"],
        dataset2["cube"],
        min_deltas=min_deltas,
        periodic_bounds=False,
    ).distance_metric()
    npt.assert_almost_equal(
        dist.histogram_distance, computed_distances["dendrohist_distance"])
    npt.assert_almost_equal(
        dist.num_distance, computed_distances["dendronum_distance"])
|
Add testing of loading and saving for Dendrogram_Stats
|
Add testing of loading and saving for Dendrogram_Stats
|
Python
|
mit
|
Astroua/TurbuStat,e-koch/TurbuStat
|
python
|
## Code Before:
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
## Instruction:
Add testing of loading and saving for Dendrogram_Stats
## Code After:
'''
Tests for Dendrogram statistics
'''
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
min_deltas = np.logspace(-1.5, 0.5, 40)
def test_DendroStat():
tester = Dendrogram_Stats(dataset1["cube"],
min_deltas=min_deltas)
tester.run(periodic_bounds=False)
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
tester_dist = \
DendroDistance(dataset1["cube"],
dataset2["cube"],
min_deltas=min_deltas,
periodic_bounds=False).distance_metric()
npt.assert_almost_equal(tester_dist.histogram_distance,
computed_distances["dendrohist_distance"])
npt.assert_almost_equal(tester_dist.num_distance,
computed_distances["dendronum_distance"])
|
// ... existing code ...
import numpy as np
import numpy.testing as npt
import os
from ..statistics import Dendrogram_Stats, DendroDistance
from ._testing_data import \
// ... modified code ...
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
# Test loading and saving
tester.save_results(keep_data=False)
tester.load_results("dendrogram_stats_output.pkl")
# Remove the file
os.remove("dendrogram_stats_output.pkl")
npt.assert_allclose(tester.numfeatures,
computed_data["dendrogram_val"])
def test_DendroDistance():
// ... rest of the code ...
|
0b963c5a4eccde9050903d6859faee9096d3ce2b
|
test/entrytestsuite.h
|
test/entrytestsuite.h
|
// CxxTest suite exercising diaryengine::Entry.
class EntryTestSuite : public CxxTest::TestSuite
{
public:

    // title() must return exactly what setTitle() stored.
    // Compares 9 bytes, i.e. "testTitle" without the terminating NUL.
    void testEntryHasGivenTitle(void)
    {
        testEntry.setTitle("testTitle");
        TS_ASSERT_SAME_DATA(testEntry.title().c_str(), "testTitle", 9);
    }

    // Entry instance shared by all tests in this suite.
    diaryengine::Entry testEntry;
};
#endif // ENTRYTESTSUITE
|
// CxxTest suite exercising diaryengine::Entry.
class EntryTestSuite : public CxxTest::TestSuite
{
public:

    // title() must return exactly what setTitle() stored.
    // Compares 9 bytes, i.e. "testTitle" without the terminating NUL.
    void testTitleCanBeSetCorrectly(void)
    {
        testEntry.setTitle("testTitle");
        TS_ASSERT_SAME_DATA(testEntry.title().c_str(), "testTitle", 9);
    }

    // An ISO-8601 UTC timestamp round-trips unchanged through setDate()/date().
    void testDateCanBeSetCorrectlyWithISOString(void)
    {
        testEntry.setDate("2007-03-01T13:00:00Z");
        TS_ASSERT(testEntry.date() == "2007-03-01T13:00:00Z");
    }

    // A timestamp with an explicit numeric UTC offset is preserved verbatim.
    void testDateUnderstandsTimeZone(void)
    {
        testEntry.setDate("2007-03-01T13:00:00+02:00");
        TS_ASSERT(testEntry.date() == "2007-03-01T13:00:00+02:00");
    }

    // Regenerating the id must yield a different value each time.
    // NOTE(review): the id is captured in a long here -- if id() returns a
    // full UUID type this may truncate; confirm id()'s return type.
    void testIdRegenerationGeneratesDifferentUUID()
    {
        testEntry.regenerateId();
        long id = testEntry.id();
        testEntry.regenerateId();
        TS_ASSERT_DIFFERS(testEntry.id(), id);
    }

    // Entry instance shared by all tests in this suite.
    diaryengine::Entry testEntry;
};
#endif // ENTRYTESTSUITE
|
Add tests for id generation and ISO date strings
|
Add tests for id generation and ISO date strings
|
C
|
bsd-3-clause
|
Acce0ss/diary-engine
|
c
|
## Code Before:
class EntryTestSuite : public CxxTest::TestSuite
{
public:
void testEntryHasGivenTitle(void)
{
testEntry.setTitle("testTitle");
TS_ASSERT_SAME_DATA(testEntry.title().c_str(), "testTitle", 9);
}
diaryengine::Entry testEntry;
};
#endif // ENTRYTESTSUITE
## Instruction:
Add tests for id generation and ISO date strings
## Code After:
class EntryTestSuite : public CxxTest::TestSuite
{
public:
void testTitleCanBeSetCorrectly(void)
{
testEntry.setTitle("testTitle");
TS_ASSERT_SAME_DATA(testEntry.title().c_str(), "testTitle", 9);
}
void testDateCanBeSetCorrectlyWithISOString(void)
{
testEntry.setDate("2007-03-01T13:00:00Z");
TS_ASSERT(testEntry.date() == "2007-03-01T13:00:00Z");
}
void testDateUnderstandsTimeZone(void)
{
testEntry.setDate("2007-03-01T13:00:00+02:00");
TS_ASSERT(testEntry.date() == "2007-03-01T13:00:00+02:00");
}
void testIdRegenerationGeneratesDifferentUUID()
{
testEntry.regenerateId();
long id = testEntry.id();
testEntry.regenerateId();
TS_ASSERT_DIFFERS(testEntry.id(), id);
}
diaryengine::Entry testEntry;
};
#endif // ENTRYTESTSUITE
|
...
class EntryTestSuite : public CxxTest::TestSuite
{
public:
void testTitleCanBeSetCorrectly(void)
{
testEntry.setTitle("testTitle");
TS_ASSERT_SAME_DATA(testEntry.title().c_str(), "testTitle", 9);
}
void testDateCanBeSetCorrectlyWithISOString(void)
{
testEntry.setDate("2007-03-01T13:00:00Z");
TS_ASSERT(testEntry.date() == "2007-03-01T13:00:00Z");
}
void testDateUnderstandsTimeZone(void)
{
testEntry.setDate("2007-03-01T13:00:00+02:00");
TS_ASSERT(testEntry.date() == "2007-03-01T13:00:00+02:00");
}
void testIdRegenerationGeneratesDifferentUUID()
{
testEntry.regenerateId();
long id = testEntry.id();
testEntry.regenerateId();
TS_ASSERT_DIFFERS(testEntry.id(), id);
}
diaryengine::Entry testEntry;
...
|
4ca6d139139a08151f7cdf89993ded3440287a4a
|
keyform/urls.py
|
keyform/urls.py
|
# URL routing for the keyform app: maps request paths to class-based views
# plus Django's stock auth views for login/logout.
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from keyform import views

# NOTE: Django resolves these patterns top-to-bottom; all patterns here are
# anchored with ^...$, so relative ordering is otherwise not significant.
urlpatterns = [
    url(r'^$', views.HomeView.as_view(), name='home'),
    url(r'^contact$', views.ContactView.as_view(), name='contact'),
    url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
    url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
    url(r'^edit-request/(?P<pk>\d+)$', views.RequestView.as_view(), name='edit-request'),
    url(r'^create$', views.KeyRequest.as_view(), name='create'),
    url(r'^add-comment$', views.RequestCommentView.as_view(), name='add-comment'),
    url(r'^login$', login, name='login', kwargs={'template_name': 'keyform/login.html'}),
    url(r'^logout$', logout_then_login, name='logout'),
]
|
# URL routing for the keyform app. The legacy entry point 'table.php' is
# permanently redirected to the new home view so old hotlinks keep working.
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import RedirectView
from django.contrib.auth.views import login, logout_then_login
from keyform import views

urlpatterns = [
    url(r'^$', views.HomeView.as_view(), name='home'),
    # Permanent (301) redirect for old hotlinks to the legacy table.php page.
    # NOTE(review): the '.' is unescaped in this regex, so e.g. 'tableXphp'
    # also matches -- likely harmless, but r'^table\.php$' would be stricter.
    url(r'^table.php$', RedirectView.as_view(pattern_name='home', permanent=True)),
    url(r'^contact$', views.ContactView.as_view(), name='contact'),
    url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
    url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
    url(r'^edit-request/(?P<pk>\d+)$', views.RequestView.as_view(), name='edit-request'),
    url(r'^create$', views.KeyRequest.as_view(), name='create'),
    url(r'^add-comment$', views.RequestCommentView.as_view(), name='add-comment'),
    url(r'^login$', login, name='login', kwargs={'template_name': 'keyform/login.html'}),
    url(r'^logout$', logout_then_login, name='logout'),
]
|
Add redirect for old hotlinks
|
Add redirect for old hotlinks
|
Python
|
mit
|
mostateresnet/keyformproject,mostateresnet/keyformproject,mostateresnet/keyformproject
|
python
|
## Code Before:
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from keyform import views
urlpatterns = [
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^contact$', views.ContactView.as_view(), name='contact'),
url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
url(r'^edit-request/(?P<pk>\d+)$', views.RequestView.as_view(), name='edit-request'),
url(r'^create$', views.KeyRequest.as_view(), name='create'),
url(r'^add-comment$', views.RequestCommentView.as_view(), name='add-comment'),
url(r'^login$', login, name='login', kwargs={'template_name': 'keyform/login.html'}),
url(r'^logout$', logout_then_login, name='logout'),
]
## Instruction:
Add redirect for old hotlinks
## Code After:
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import RedirectView
from django.contrib.auth.views import login, logout_then_login
from keyform import views
urlpatterns = [
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^table.php$', RedirectView.as_view(pattern_name='home', permanent=True)),
url(r'^contact$', views.ContactView.as_view(), name='contact'),
url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
url(r'^edit-request/(?P<pk>\d+)$', views.RequestView.as_view(), name='edit-request'),
url(r'^create$', views.KeyRequest.as_view(), name='create'),
url(r'^add-comment$', views.RequestCommentView.as_view(), name='add-comment'),
url(r'^login$', login, name='login', kwargs={'template_name': 'keyform/login.html'}),
url(r'^logout$', logout_then_login, name='logout'),
]
|
...
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import RedirectView
from django.contrib.auth.views import login, logout_then_login
from keyform import views
urlpatterns = [
url(r'^$', views.HomeView.as_view(), name='home'),
url(r'^table.php$', RedirectView.as_view(pattern_name='home', permanent=True)),
url(r'^contact$', views.ContactView.as_view(), name='contact'),
url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
...
|
4e8c84bf36250d7e61b585fc5db545206cab9730
|
perfkitbenchmarker/scripts/spark_table.py
|
perfkitbenchmarker/scripts/spark_table.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
def main():
    """Register a Hive table for each parquet dataset under root_dir.

    Expects two CLI arguments: the root directory holding one subdirectory
    per table, and a comma-separated list of table names.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('root_dir')
    parser.add_argument('tables', type=lambda csv: csv.split(','))
    args = parser.parse_args()

    session = (SparkSession.builder
               .appName('Setup Spark tables')
               .enableHiveSupport()
               .getOrCreate())

    for table_name in args.tables:
        logging.info('Creating table %s', table_name)
        data_dir = os.path.join(args.root_dir, table_name)
        # Drop any stale definition, then register the parquet data.
        session.sql('drop table if exists ' + table_name)
        session.catalog.createTable(table_name, data_dir, source='parquet')


if __name__ == '__main__':
    main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
from pyspark.sql.utils import AnalysisException
def main():
    """Register a Hive table per parquet dataset and load its partitions.

    Expects two CLI arguments: the root directory holding one subdirectory
    per table, and a comma-separated list of table names.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('root_dir')
    parser.add_argument('tables', type=lambda csv: csv.split(','))
    args = parser.parse_args()

    session = (SparkSession.builder
               .appName('Setup Spark tables')
               .enableHiveSupport()
               .getOrCreate())

    for name in args.tables:
        logging.info('Creating table %s', name)
        location = os.path.join(args.root_dir, name)
        # Replace any stale table definition with the on-disk parquet data.
        session.sql('DROP TABLE IF EXISTS ' + name)
        session.catalog.createTable(name, location, source='parquet')
        try:
            # This loads the partitions under the table if table is partitioned.
            session.sql('MSCK REPAIR TABLE ' + name)
        except AnalysisException:
            # The table was not partitioned, which was presumably expected
            pass


if __name__ == '__main__':
    main()
|
Support creating Hive tables with partitioned data.
|
Support creating Hive tables with partitioned data.
PiperOrigin-RevId: 335539022
|
Python
|
apache-2.0
|
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
|
python
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('drop table if exists ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
if __name__ == '__main__':
main()
## Instruction:
Support creating Hive tables with partitioned data.
PiperOrigin-RevId: 335539022
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import os
from pyspark.sql import SparkSession
from pyspark.sql.utils import AnalysisException
def main():
parser = argparse.ArgumentParser()
parser.add_argument('root_dir')
parser.add_argument('tables', type=lambda csv: csv.split(','))
args = parser.parse_args()
spark = (SparkSession.builder
.appName('Setup Spark tables')
.enableHiveSupport()
.getOrCreate())
for table in args.tables:
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('DROP TABLE IF EXISTS ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
try:
# This loads the partitions under the table if table is partitioned.
spark.sql('MSCK REPAIR TABLE ' + table)
except AnalysisException:
# The table was not partitioned, which was presumably expected
pass
if __name__ == '__main__':
main()
|
...
import logging
import os
from pyspark.sql import SparkSession
from pyspark.sql.utils import AnalysisException
def main():
...
logging.info('Creating table %s', table)
table_dir = os.path.join(args.root_dir, table)
# clean up previous table
spark.sql('DROP TABLE IF EXISTS ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
try:
# This loads the partitions under the table if table is partitioned.
spark.sql('MSCK REPAIR TABLE ' + table)
except AnalysisException:
# The table was not partitioned, which was presumably expected
pass
if __name__ == '__main__':
main()
...
|
6bd59ef149ec32f7d0a64ea2ad50a2729aceb6f5
|
fluent_contents/models/mixins.py
|
fluent_contents/models/mixins.py
|
from django.core.cache import cache
class CachedModelMixin(object):
    """
    Mixin to add cache expiration to a model.

    Subclasses implement :meth:`get_cache_keys` (or override
    :meth:`clear_cache`); the mixin invalidates those keys on save/delete.
    """
    # When True, cache keys are also cleared when the object is first created.
    clear_cache_on_add = False

    def save(self, *args, **kwargs):
        is_new = not self.pk or self._state.adding
        super(CachedModelMixin, self).save(*args, **kwargs)
        # A freshly created object cannot have stale cache entries yet,
        # unless the model explicitly opts in via clear_cache_on_add.
        if not is_new or self.clear_cache_on_add:
            self.clear_cache()
    save.alters_data = True

    def delete(self, *args, **kwargs):
        deleted_pk = self.pk
        # Keep Django's collector result so callers (e.g. the admin, bulk
        # views) still see the deletion summary instead of None.
        collector_result = super(CachedModelMixin, self).delete(*args, **kwargs)
        # Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
        self.pk = deleted_pk
        self.clear_cache()
        self.pk = None
        return collector_result
    # Must restore these options, or risk removing with a template print statement.
    delete.alters_data = True

    def clear_cache(self):
        """
        Delete the cache keys associated with this model.
        """
        cache.delete_many(self.get_cache_keys())
    clear_cache.alters_data = True

    def get_cache_keys(self):
        """
        Get a list of all cache keys associated with this model.

        Raises NotImplementedError unless overridden by the subclass.
        """
        raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
from django.core.cache import cache
class CachedModelMixin(object):
    """
    Mixin to add cache expiration to a model.
    """
    # When True, cache keys are cleared even on first save of a new object.
    clear_cache_on_add = False

    def save(self, *args, **kwargs):
        adding = not self.pk or self._state.adding
        super(CachedModelMixin, self).save(*args, **kwargs)
        if self.clear_cache_on_add or not adding:
            self.clear_cache()
    save.alters_data = True

    def delete(self, *args, **kwargs):
        old_pk = self.pk
        result = super(CachedModelMixin, self).delete(*args, **kwargs)
        # Temporarily restore the PK so get_cache_keys() /
        # plugin.get_output_cache_keys() can still read it.
        self.pk = old_pk
        self.clear_cache()
        self.pk = None
        return result
    # Must restore these options, or risk removing with a template print statement.
    delete.alters_data = True

    def clear_cache(self):
        """
        Delete the cache keys associated with this model.
        """
        cache.delete_many(self.get_cache_keys())
    clear_cache.alters_data = True

    def get_cache_keys(self):
        """
        Get a list of all cache keys associated with this model.
        """
        raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
Make sure our CachedModelMixin.delete() also returns the collector results
|
Make sure our CachedModelMixin.delete() also returns the collector results
|
Python
|
apache-2.0
|
edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents
|
python
|
## Code Before:
from django.core.cache import cache
class CachedModelMixin(object):
"""
Mixin to add cache expiration to a model.
"""
clear_cache_on_add = False
def save(self, *args, **kwargs):
is_new = not self.pk or self._state.adding
super(CachedModelMixin, self).save(*args, **kwargs)
if not is_new or self.clear_cache_on_add:
self.clear_cache()
save.alters_data = True
def delete(self, *args, **kwargs):
deleted_pk = self.pk
super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
def clear_cache(self):
"""
Delete the cache keys associated with this model.
"""
cache.delete_many(self.get_cache_keys())
clear_cache.alters_data = True
def get_cache_keys(self):
"""
Get a list of all cache keys associated with this model.
"""
raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
## Instruction:
Make sure our CachedModelMixin.delete() also returns the collector results
## Code After:
from django.core.cache import cache
class CachedModelMixin(object):
"""
Mixin to add cache expiration to a model.
"""
clear_cache_on_add = False
def save(self, *args, **kwargs):
is_new = not self.pk or self._state.adding
super(CachedModelMixin, self).save(*args, **kwargs)
if not is_new or self.clear_cache_on_add:
self.clear_cache()
save.alters_data = True
def delete(self, *args, **kwargs):
deleted_pk = self.pk
collector_result = super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
return collector_result
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
def clear_cache(self):
"""
Delete the cache keys associated with this model.
"""
cache.delete_many(self.get_cache_keys())
clear_cache.alters_data = True
def get_cache_keys(self):
"""
Get a list of all cache keys associated with this model.
"""
raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
// ... existing code ...
def delete(self, *args, **kwargs):
deleted_pk = self.pk
collector_result = super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
return collector_result
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
// ... rest of the code ...
|
5c12b0c04b25e414b1bd04250cde0c3b1f869104
|
hr_emergency_contact/models/hr_employee.py
|
hr_emergency_contact/models/hr_employee.py
|
from openerp import models, fields
class HrEmployee(models.Model):
    """Extend hr.employee with a list of emergency contacts."""

    # Extending an existing model only requires _inherit. Also setting
    # _name to the same value is redundant; dropping it makes the intent
    # (extension, not a new model) explicit.
    _inherit = 'hr.employee'

    emergency_contact_ids = fields.Many2many(
        string='Emergency Contacts',
        comodel_name='res.partner',
        relation='rel_employee_emergency_contact',
        column1='employee_id',
        column2='partner_id',
        # Restrict selection to stand-alone individuals: not companies and
        # not sub-contacts of a company.
        domain=[
            ('is_company', '=', False),
            ('parent_id', '=', False),
        ]
    )
|
from openerp import models, fields
class HrEmployee(models.Model):
    # Extension of the existing hr.employee model (no new table of its own).
    _inherit = 'hr.employee'

    # People to notify in an emergency; restricted to stand-alone
    # individuals (not companies, not sub-contacts of a company).
    emergency_contact_ids = fields.Many2many(
        string='Emergency Contacts',
        comodel_name='res.partner',
        relation='rel_employee_emergency_contact',
        column1='employee_id',
        column2='partner_id',
        domain=[
            ('is_company', '=', False),
            ('parent_id', '=', False),
        ]
    )
|
Remove _name attribute on hr.employee
|
Remove _name attribute on hr.employee
|
Python
|
agpl-3.0
|
VitalPet/hr,thinkopensolutions/hr,VitalPet/hr,xpansa/hr,Eficent/hr,Eficent/hr,feketemihai/hr,feketemihai/hr,acsone/hr,open-synergy/hr,open-synergy/hr,xpansa/hr,acsone/hr,thinkopensolutions/hr
|
python
|
## Code Before:
from openerp import models, fields
class HrEmployee(models.Model):
_name = 'hr.employee'
_inherit = 'hr.employee'
emergency_contact_ids = fields.Many2many(
string='Emergency Contacts',
comodel_name='res.partner',
relation='rel_employee_emergency_contact',
column1='employee_id',
column2='partner_id',
domain=[
('is_company', '=', False),
('parent_id', '=', False),
]
)
## Instruction:
Remove _name attribute on hr.employee
## Code After:
from openerp import models, fields
class HrEmployee(models.Model):
_inherit = 'hr.employee'
emergency_contact_ids = fields.Many2many(
string='Emergency Contacts',
comodel_name='res.partner',
relation='rel_employee_emergency_contact',
column1='employee_id',
column2='partner_id',
domain=[
('is_company', '=', False),
('parent_id', '=', False),
]
)
|
# ... existing code ...
class HrEmployee(models.Model):
_inherit = 'hr.employee'
emergency_contact_ids = fields.Many2many(
# ... rest of the code ...
|
c3de0c71a8392a884f8fd08dbee8f337ba7833c7
|
src/ggrc/migrations/versions/20130724021606_2bf7c04016c9_person_email_must_be.py
|
src/ggrc/migrations/versions/20130724021606_2bf7c04016c9_person_email_must_be.py
|
# revision identifiers, used by Alembic.
revision = '2bf7c04016c9'
down_revision = '2b709b655bf'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Enforce that each person record has a unique e-mail address.
    op.create_unique_constraint('uq_people_email', 'people', ['email'])
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Revert: drop the uniqueness constraint added in upgrade().
    op.drop_constraint('uq_people_email', 'people', type_='unique')
    ### end Alembic commands ###
|
# revision identifiers, used by Alembic.
revision = '2bf7c04016c9'
down_revision = 'd3af6d071ef'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Enforce that each person record has a unique e-mail address.
    op.create_unique_constraint('uq_people_email', 'people', ['email'])
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Revert: drop the uniqueness constraint added in upgrade().
    op.drop_constraint('uq_people_email', 'people', type_='unique')
    ### end Alembic commands ###
|
Update 'down' version in migration due to merge
|
Update 'down' version in migration due to merge
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,andrei-karalionak/ggrc-core
|
python
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '2bf7c04016c9'
down_revision = '2b709b655bf'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_unique_constraint('uq_people_email', 'people', ['email'])
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('uq_people_email', 'people', type_='unique')
### end Alembic commands ###
## Instruction:
Update 'down' version in migration due to merge
## Code After:
# revision identifiers, used by Alembic.
revision = '2bf7c04016c9'
down_revision = 'd3af6d071ef'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_unique_constraint('uq_people_email', 'people', ['email'])
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('uq_people_email', 'people', type_='unique')
### end Alembic commands ###
|
// ... existing code ...
# revision identifiers, used by Alembic.
revision = '2bf7c04016c9'
down_revision = 'd3af6d071ef'
from alembic import op
import sqlalchemy as sa
// ... rest of the code ...
|
7623966ac3962dfe871638b6804e056fa794ea60
|
api/webscripts/show_summary.py
|
api/webscripts/show_summary.py
|
from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
    """Web script rendering the summary view: total hit count plus article list."""
    name = "Summary"
    form_template = None
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id (str/int) or a sequence of
        # ids; only the first project is used for the per-article hack below.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # NOTE(review): self.data is the raw request data and reaches
        # get_total_n() unvalidated here; consider cleaning it through the
        # selection form first -- TODO confirm against keywordsearch's contract.
        n = keywordsearch.get_total_n(self.data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # Hack: attach the originating project id so templates can build links.
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
    """Web script rendering the summary view: total hit count plus article list."""
    name = "Summary"
    form_template = None
    form = None

    def run(self):
        self.progress_monitor.update(1, "Creating summary")
        # 'projects' may arrive as a single id (str/int) or a sequence of
        # ids; only the first project is used for the per-article hack below.
        if isinstance(self.data['projects'], (basestring, int)):
            project_id = int(self.data['projects'])
        else:
            project_id = int(self.data['projects'][0])
        # Validate/clean the raw request data before counting hits.
        sf = SelectionForm(self.project, self.data)
        sf.full_clean()
        n = keywordsearch.get_total_n(sf.cleaned_data)
        self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
        articles = list(ArticleListScript(self.data).run())
        for a in articles:
            # Hack: attach the originating project id so templates can build links.
            a.hack_project_id = project_id
        self.output_template = 'api/webscripts/articlelist.html'
        self.progress_monitor.update(40, "Created summary")
        return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
Clean user data before passing it to keywordsearch.get_total_n()
|
Clean user data before passing it to keywordsearch.get_total_n()
|
Python
|
agpl-3.0
|
amcat/amcat,tschmorleiz/amcat,tschmorleiz/amcat,tschmorleiz/amcat,amcat/amcat,amcat/amcat,tschmorleiz/amcat,amcat/amcat,tschmorleiz/amcat,amcat/amcat,amcat/amcat
|
python
|
## Code Before:
from django import forms
from webscript import WebScript
from django.template.loader import render_to_string
import amcat.scripts.forms
import amcat.forms
from amcat.tools import keywordsearch
from amcat.scripts import script
#from amcat.scripts.searchscripts.articlelist import ArticleListScript, ArticleListSpecificForm
from amcat.scripts.searchscripts.articlelist import ArticleListScript
class ShowSummary(WebScript):
name = "Summary"
form_template = None
form = None
def run(self):
self.progress_monitor.update(1, "Creating summary")
if isinstance(self.data['projects'], (basestring, int)):
project_id = int(self.data['projects'])
else:
project_id = int(self.data['projects'][0])
n = keywordsearch.get_total_n(self.data)
self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
articles = list(ArticleListScript(self.data).run())
for a in articles:
a.hack_project_id = project_id
self.output_template = 'api/webscripts/articlelist.html'
self.progress_monitor.update(40, "Created summary")
return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
## Instruction:
Clean user data before passing it to keywordsearch.get_total_n()
## Code After:
from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
name = "Summary"
form_template = None
form = None
def run(self):
self.progress_monitor.update(1, "Creating summary")
if isinstance(self.data['projects'], (basestring, int)):
project_id = int(self.data['projects'])
else:
project_id = int(self.data['projects'][0])
sf = SelectionForm(self.project, self.data)
sf.full_clean()
n = keywordsearch.get_total_n(sf.cleaned_data)
self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
articles = list(ArticleListScript(self.data).run())
for a in articles:
a.hack_project_id = project_id
self.output_template = 'api/webscripts/articlelist.html'
self.progress_monitor.update(40, "Created summary")
return self.outputResponse(dict(articlelist=articles, n=n, page=self.data.get('start')), ArticleListScript.output_type)
|
# ... existing code ...
from webscript import WebScript
from amcat.tools import keywordsearch
from amcat.scripts.searchscripts.articlelist import ArticleListScript
from amcat.scripts.forms import SelectionForm
class ShowSummary(WebScript):
# ... modified code ...
project_id = int(self.data['projects'])
else:
project_id = int(self.data['projects'][0])
sf = SelectionForm(self.project, self.data)
sf.full_clean()
n = keywordsearch.get_total_n(sf.cleaned_data)
self.progress_monitor.update(39, "Found {n} articles in total".format(**locals()))
articles = list(ArticleListScript(self.data).run())
for a in articles:
# ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.