commit (stringlengths 40–40) | old_file (stringlengths 4–234) | new_file (stringlengths 4–234) | old_contents (stringlengths 10–3.01k) | new_contents (stringlengths 19–3.38k) | subject (stringlengths 16–736) | message (stringlengths 17–2.63k) | lang (stringclasses, 4 values) | license (stringclasses, 13 values) | repos (stringlengths 5–82.6k) | config (stringclasses, 4 values) | content (stringlengths 134–4.41k) | fuzzy_diff (stringlengths 29–3.44k)
---|---|---|---|---|---|---|---|---|---|---|---|---
9375216f698d35aa2afbad5cd0ab10bfb82a8d1c
|
org.spoofax.jsglr2.integration/src/main/java/org/spoofax/jsglr2/testset/TestSetIncrementalInput.java
|
org.spoofax.jsglr2.integration/src/main/java/org/spoofax/jsglr2/testset/TestSetIncrementalInput.java
|
package org.spoofax.jsglr2.testset;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.spoofax.jsglr2.testset.testinput.IncrementalStringInput;
public class TestSetIncrementalInput extends TestSetInput<String[], IncrementalStringInput> {
// Directory in the org.spoofax.jsglr2.integration/src/main/resources/samples directory, containing files %d.in
public final String directory;
public TestSetIncrementalInput(String directory) {
super(Type.INCREMENTAL);
this.directory = directory;
}
@Override protected IncrementalStringInput getInput(String filename, String[] input) {
return new IncrementalStringInput(filename, input);
}
@Override public List<IncrementalStringInput> getInputs() throws IOException {
List<String> inputs = new ArrayList<>();
for(int i = 0;; i++) {
try {
inputs.add(getFileAsString(directory + "/" + i + ".in"));
} catch(NullPointerException | IOException ignored) {
break;
}
}
return Collections.singletonList(getInput(directory, inputs.toArray(new String[0])));
}
}
|
package org.spoofax.jsglr2.testset;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.spoofax.jsglr2.testset.testinput.IncrementalStringInput;
public class TestSetIncrementalInput extends TestSetInput<String[], IncrementalStringInput> {
// Directory in the org.spoofax.jsglr2.integration/src/main/resources/samples directory, containing files %d.in
public final String directory;
public TestSetIncrementalInput(String directory) {
super(Type.INCREMENTAL);
this.directory = directory;
}
@Override protected IncrementalStringInput getInput(String filename, String[] input) {
return new IncrementalStringInput(filename, input);
}
@Override public List<IncrementalStringInput> getInputs() throws IOException {
List<String> inputs = new ArrayList<>();
for(int i = 0;; i++) {
try {
inputs.add(getFileAsString(
directory + (directory.endsWith(File.separator) ? "" : File.separator) + i + ".in"));
} catch(NullPointerException | IOException ignored) {
break;
}
}
return Collections.singletonList(getInput(directory, inputs.toArray(new String[0])));
}
}
|
Fix duplicate trailing directory separator
|
Fix duplicate trailing directory separator
|
Java
|
apache-2.0
|
metaborg/jsglr,metaborg/jsglr,metaborg/jsglr,metaborg/jsglr
|
java
|
## Code Before:
package org.spoofax.jsglr2.testset;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.spoofax.jsglr2.testset.testinput.IncrementalStringInput;
public class TestSetIncrementalInput extends TestSetInput<String[], IncrementalStringInput> {
// Directory in the org.spoofax.jsglr2.integration/src/main/resources/samples directory, containing files %d.in
public final String directory;
public TestSetIncrementalInput(String directory) {
super(Type.INCREMENTAL);
this.directory = directory;
}
@Override protected IncrementalStringInput getInput(String filename, String[] input) {
return new IncrementalStringInput(filename, input);
}
@Override public List<IncrementalStringInput> getInputs() throws IOException {
List<String> inputs = new ArrayList<>();
for(int i = 0;; i++) {
try {
inputs.add(getFileAsString(directory + "/" + i + ".in"));
} catch(NullPointerException | IOException ignored) {
break;
}
}
return Collections.singletonList(getInput(directory, inputs.toArray(new String[0])));
}
}
## Instruction:
Fix duplicate trailing directory separator
## Code After:
package org.spoofax.jsglr2.testset;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.spoofax.jsglr2.testset.testinput.IncrementalStringInput;
public class TestSetIncrementalInput extends TestSetInput<String[], IncrementalStringInput> {
// Directory in the org.spoofax.jsglr2.integration/src/main/resources/samples directory, containing files %d.in
public final String directory;
public TestSetIncrementalInput(String directory) {
super(Type.INCREMENTAL);
this.directory = directory;
}
@Override protected IncrementalStringInput getInput(String filename, String[] input) {
return new IncrementalStringInput(filename, input);
}
@Override public List<IncrementalStringInput> getInputs() throws IOException {
List<String> inputs = new ArrayList<>();
for(int i = 0;; i++) {
try {
inputs.add(getFileAsString(
directory + (directory.endsWith(File.separator) ? "" : File.separator) + i + ".in"));
} catch(NullPointerException | IOException ignored) {
break;
}
}
return Collections.singletonList(getInput(directory, inputs.toArray(new String[0])));
}
}
|
...
package org.spoofax.jsglr2.testset;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
...
List<String> inputs = new ArrayList<>();
for(int i = 0;; i++) {
try {
inputs.add(getFileAsString(
directory + (directory.endsWith(File.separator) ? "" : File.separator) + i + ".in"));
} catch(NullPointerException | IOException ignored) {
break;
}
...
|
d1b7753fd29cb5c1f68b5ee121a511e43c99b5de
|
pmix/ppp/odkcalculate.py
|
pmix/ppp/odkcalculate.py
|
class OdkCalculate:
def __init__(self, row):
self.row = row
def to_html(self):
return ""
def to_text(self):
return ""
def __repr__(self):
return '<OdkCalculate {}>'.format(self.row['name'])
|
class OdkCalculate:
def __init__(self, row):
self.row = row
def to_html(self, *args, **kwargs):
return ""
def to_text(self, *args, **kwargs):
return ""
def __repr__(self):
return '<OdkCalculate {}>'.format(self.row['name'])
|
Update signature of to_text and to_html
|
Update signature of to_text and to_html
|
Python
|
mit
|
jkpr/pmix
|
python
|
## Code Before:
class OdkCalculate:
def __init__(self, row):
self.row = row
def to_html(self):
return ""
def to_text(self):
return ""
def __repr__(self):
return '<OdkCalculate {}>'.format(self.row['name'])
## Instruction:
Update signature of to_text and to_html
## Code After:
class OdkCalculate:
def __init__(self, row):
self.row = row
def to_html(self, *args, **kwargs):
return ""
def to_text(self, *args, **kwargs):
return ""
def __repr__(self):
return '<OdkCalculate {}>'.format(self.row['name'])
|
...
def __init__(self, row):
self.row = row
def to_html(self, *args, **kwargs):
return ""
def to_text(self, *args, **kwargs):
return ""
def __repr__(self):
...
|
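The point of the widened signatures in this row is that callers can now pass rendering arguments without `OdkCalculate` raising `TypeError`; a small sketch of the effect (the row dict and the extra arguments are made up, and it assumes the `OdkCalculate` class defined above):

```python
# Sketch: with the *args/**kwargs signatures, extra rendering options are
# accepted and ignored instead of raising TypeError.
calc = OdkCalculate({'name': 'age_in_years'})  # made-up row
print(calc.to_html('default', pretty=True))    # "" -- the old to_html(self) raised TypeError here
print(calc.to_text(lang='en'))                 # ""
print(repr(calc))                              # <OdkCalculate age_in_years>
```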
c43a677e19ba1d2603dd4b7907fe053561c4fa06
|
neutron/objects/__init__.py
|
neutron/objects/__init__.py
|
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
|
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
Use dirname in object recursive import
|
Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa
|
Python
|
apache-2.0
|
openstack/neutron,huntxu/neutron,openstack/neutron,eayunstack/neutron,eayunstack/neutron,huntxu/neutron,mahak/neutron,openstack/neutron,mahak/neutron,mahak/neutron,noironetworks/neutron,noironetworks/neutron
|
python
|
## Code Before:
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
## Instruction:
Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa
## Code After:
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
# ... existing code ...
import os
import sys
# ... modified code ...
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
# ... rest of the code ...
|
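The fix in this row relies on the fact that a package's `__file__` points at its `__init__.py`, so the recursive import has to walk the containing directory instead; a standalone sketch of that distinction, using the stdlib `email` package as a stand-in for `neutron.objects`:

```python
# Sketch: a package's __file__ is its __init__.py; dirname() gives the
# directory that actually contains the submodules to import recursively.
import os
import sys
import email  # stand-in for neutron.objects; any package works

pkg_file = sys.modules['email'].__file__
print(pkg_file)                   # .../email/__init__.py  (nothing "under" this path)
print(os.path.dirname(pkg_file))  # .../email              (what import_modules_recursively needs)
```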
8a9e2666e173078ea98e52522442e6e943cc9e6e
|
banks.py
|
banks.py
|
from collections import namedtuple
Bank = namedtuple('Bank', ['name', 'header', 'delimiter'])
banks = dict()
def toKey(name : str) -> str:
key = [c for c in name if c.isalpha()]
key = ''.join(key)
return key.lower()
NordeaHeader = ['Date', 'Transaction', 'Memo', 'Amount', 'Balance']
Nordea = Bank('Nordea', NordeaHeader, delimiter=',')
banks[toKey(Nordea.name)] = Nordea
IcaHeader = ['Date', 'Payee', 'Transaction', 'Memo', 'Amount', 'Balance']
Ica = Bank('ICA Banken', IcaHeader, delimiter=';')
banks[toKey(Ica.name)] = Ica
|
from collections import namedtuple
Bank = namedtuple('Bank', ['name', 'header', 'delimiter'])
banks = dict()
def toKey(name : str) -> str:
key = [c for c in name if c.isalpha()]
key = ''.join(key)
return key.lower()
NordeaOldHeader = ['Date', 'Transaction', 'Memo', 'Amount', 'Balance']
NordeaOld = Bank('Nordea (gamla)', NordeaOldHeader, delimiter=',')
banks[toKey(NordeaOld.name)] = NordeaOld
# All information regarding the payee is in a different field called "Rubrik"
# while "Betalningsmottagare" (i.e, "payee" in English) is empty.
# This makes no sense, but that's the format they currently use.
NordeaHeader = ['Date', 'Amount', "Sender" ,"TruePayee", "Name", "Payee", "Balance", "Currency"]
Nordea = Bank('Nordea', NordeaHeader, delimiter=';')
banks[toKey(Nordea.name)] = Nordea
IcaHeader = ['Date', 'Payee', 'Transaction', 'Memo', 'Amount', 'Balance']
Ica = Bank('ICA Banken', IcaHeader, delimiter=';')
banks[toKey(Ica.name)] = Ica
|
Prepare for new Nordea header
|
Prepare for new Nordea header
In June, Nordea will switch to a new web-interface.
This new interface also has a new csv-export format.
This commit adapts Nordea to the new format, while the previous
header is called "Nordea (gamal)".
I might remove the previous header come late June, after Nordea has
made the switch.
|
Python
|
mit
|
adnilsson/Bank2YNAB
|
python
|
## Code Before:
from collections import namedtuple
Bank = namedtuple('Bank', ['name', 'header', 'delimiter'])
banks = dict()
def toKey(name : str) -> str:
key = [c for c in name if c.isalpha()]
key = ''.join(key)
return key.lower()
NordeaHeader = ['Date', 'Transaction', 'Memo', 'Amount', 'Balance']
Nordea = Bank('Nordea', NordeaHeader, delimiter=',')
banks[toKey(Nordea.name)] = Nordea
IcaHeader = ['Date', 'Payee', 'Transaction', 'Memo', 'Amount', 'Balance']
Ica = Bank('ICA Banken', IcaHeader, delimiter=';')
banks[toKey(Ica.name)] = Ica
## Instruction:
Prepare for new Nordea header
In June, Nordea will switch to a new web-interface.
This new interface also has a new csv-export format.
This commit adapts Nordea to the new format, while the previous
header is called "Nordea (gamal)".
I might remove the previous header come late June, after Nordea has
made the switch.
## Code After:
from collections import namedtuple
Bank = namedtuple('Bank', ['name', 'header', 'delimiter'])
banks = dict()
def toKey(name : str) -> str:
key = [c for c in name if c.isalpha()]
key = ''.join(key)
return key.lower()
NordeaOldHeader = ['Date', 'Transaction', 'Memo', 'Amount', 'Balance']
NordeaOld = Bank('Nordea (gamla)', NordeaOldHeader, delimiter=',')
banks[toKey(NordeaOld.name)] = NordeaOld
# All information regarding the payee is in a different field called "Rubrik"
# while "Betalningsmottagare" (i.e, "payee" in English) is empty.
# This makes no sense, but that's the format they currently use.
NordeaHeader = ['Date', 'Amount', "Sender" ,"TruePayee", "Name", "Payee", "Balance", "Currency"]
Nordea = Bank('Nordea', NordeaHeader, delimiter=';')
banks[toKey(Nordea.name)] = Nordea
IcaHeader = ['Date', 'Payee', 'Transaction', 'Memo', 'Amount', 'Balance']
Ica = Bank('ICA Banken', IcaHeader, delimiter=';')
banks[toKey(Ica.name)] = Ica
|
...
key = ''.join(key)
return key.lower()
NordeaOldHeader = ['Date', 'Transaction', 'Memo', 'Amount', 'Balance']
NordeaOld = Bank('Nordea (gamla)', NordeaOldHeader, delimiter=',')
banks[toKey(NordeaOld.name)] = NordeaOld
# All information regarding the payee is in a different field called "Rubrik"
# while "Betalningsmottagare" (i.e, "payee" in English) is empty.
# This makes no sense, but that's the format they currently use.
NordeaHeader = ['Date', 'Amount', "Sender" ,"TruePayee", "Name", "Payee", "Balance", "Currency"]
Nordea = Bank('Nordea', NordeaHeader, delimiter=';')
banks[toKey(Nordea.name)] = Nordea
IcaHeader = ['Date', 'Payee', 'Transaction', 'Memo', 'Amount', 'Balance']
...
|
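A short usage sketch for the bank table in this row, assuming the `banks` dict and `toKey` defined above are in scope; the CSV line is made up for illustration:

```python
# Keys are normalised through toKey(), so 'Nordea (gamla)' is stored as 'nordeagamla'.
import csv
import io

old_nordea = banks[toKey('Nordea (gamla)')]
print(old_nordea.delimiter)   # ','  (the pre-June export format)

# Parsing one made-up line of the new semicolon-delimited Nordea export:
new_nordea = banks['nordea']
line = '2018-06-01;-120,00;;;Coop;;1000,00;SEK'
row = next(csv.reader(io.StringIO(line), delimiter=new_nordea.delimiter))
print(dict(zip(new_nordea.header, row)))
# {'Date': '2018-06-01', 'Amount': '-120,00', 'Sender': '', 'TruePayee': '',
#  'Name': 'Coop', 'Payee': '', 'Balance': '1000,00', 'Currency': 'SEK'}
```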
7629afde2627457b4f4b19e1542a87e695c1837d
|
tests/events/test_models.py
|
tests/events/test_models.py
|
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
|
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
|
Make sure slug gets updated on date change
|
Make sure slug gets updated on date change
|
Python
|
mit
|
FlowFX/reggae-cdmx,FlowFX/reggae-cdmx
|
python
|
## Code Before:
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
## Instruction:
Make sure slug gets updated on date change
## Code After:
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
|
// ... existing code ...
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
// ... rest of the code ...
|
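The new test in this row only passes if `Event.save()` rebuilds the slug on every save rather than only on creation; a hypothetical minimal model sketch that would satisfy both slug tests (the real `app.events.models` code is not shown in this row):

```python
# Hypothetical sketch, not the project's actual model: the slug is
# regenerated from date and title on every save.
from django.db import models
from django.utils.text import slugify


class Event(models.Model):
    title = models.CharField(max_length=200)
    date = models.DateField()
    slug = models.SlugField(blank=True)

    def save(self, *args, **kwargs):
        # '2018-01-01' + 'One Happy Family' -> '2018-01-01-one-happy-family'
        self.slug = slugify('{}-{}'.format(self.date.isoformat(), self.title))
        super(Event, self).save(*args, **kwargs)
```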
1594644990fe6c621d309b587d844669ec273dec
|
setup.py
|
setup.py
|
import os
import sys
from setuptools import setup, find_packages
from pkg_resources import resource_filename
# depending on your execution context the version file
# may be located in a different place!
vsn_path = resource_filename(__name__, 'hvac/version')
if not os.path.exists(vsn_path):
vsn_path = resource_filename(__name__, 'version')
if not os.path.exists(vsn_path):
print("%s is missing" % vsn_path)
sys.exit(1)
setup(
name='hvac',
version=open(vsn_path, 'r').read(),
description='HashiCorp Vault API client',
author='Ian Unruh',
author_email='[email protected]',
url='https://github.com/ianunruh/hvac',
keywords=['hashicorp', 'vault'],
classifiers=['License :: OSI Approved :: Apache Software License'],
packages=find_packages(),
install_requires=[
'requests>=2.7.0',
],
include_package_data=True,
package_data={'hvac': ['version']},
extras_require={
'parser': ['pyhcl>=0.2.1,<0.3.0']
}
)
|
import os
import sys
from setuptools import setup, find_packages
from pkg_resources import resource_filename
with open("README.md", "r") as fh:
long_description = fh.read()
# depending on your execution context the version file
# may be located in a different place!
vsn_path = resource_filename(__name__, 'hvac/version')
if not os.path.exists(vsn_path):
vsn_path = resource_filename(__name__, 'version')
if not os.path.exists(vsn_path):
print("%s is missing" % vsn_path)
sys.exit(1)
setup(
name='hvac',
version=open(vsn_path, 'r').read(),
description='HashiCorp Vault API client',
long_description=long_description,
long_description_content_type="text/markdown",
author='Ian Unruh',
author_email='[email protected]',
url='https://github.com/ianunruh/hvac',
keywords=['hashicorp', 'vault'],
classifiers=['License :: OSI Approved :: Apache Software License'],
packages=find_packages(),
install_requires=[
'requests>=2.7.0',
],
include_package_data=True,
package_data={'hvac': ['version']},
extras_require={
'parser': ['pyhcl>=0.2.1,<0.3.0']
}
)
|
Include README.md in package metadata
|
Include README.md in package metadata
|
Python
|
apache-2.0
|
ianunruh/hvac,ianunruh/hvac
|
python
|
## Code Before:
import os
import sys
from setuptools import setup, find_packages
from pkg_resources import resource_filename
# depending on your execution context the version file
# may be located in a different place!
vsn_path = resource_filename(__name__, 'hvac/version')
if not os.path.exists(vsn_path):
vsn_path = resource_filename(__name__, 'version')
if not os.path.exists(vsn_path):
print("%s is missing" % vsn_path)
sys.exit(1)
setup(
name='hvac',
version=open(vsn_path, 'r').read(),
description='HashiCorp Vault API client',
author='Ian Unruh',
author_email='[email protected]',
url='https://github.com/ianunruh/hvac',
keywords=['hashicorp', 'vault'],
classifiers=['License :: OSI Approved :: Apache Software License'],
packages=find_packages(),
install_requires=[
'requests>=2.7.0',
],
include_package_data=True,
package_data={'hvac': ['version']},
extras_require={
'parser': ['pyhcl>=0.2.1,<0.3.0']
}
)
## Instruction:
Include README.md in package metadata
## Code After:
import os
import sys
from setuptools import setup, find_packages
from pkg_resources import resource_filename
with open("README.md", "r") as fh:
long_description = fh.read()
# depending on your execution context the version file
# may be located in a different place!
vsn_path = resource_filename(__name__, 'hvac/version')
if not os.path.exists(vsn_path):
vsn_path = resource_filename(__name__, 'version')
if not os.path.exists(vsn_path):
print("%s is missing" % vsn_path)
sys.exit(1)
setup(
name='hvac',
version=open(vsn_path, 'r').read(),
description='HashiCorp Vault API client',
long_description=long_description,
long_description_content_type="text/markdown",
author='Ian Unruh',
author_email='[email protected]',
url='https://github.com/ianunruh/hvac',
keywords=['hashicorp', 'vault'],
classifiers=['License :: OSI Approved :: Apache Software License'],
packages=find_packages(),
install_requires=[
'requests>=2.7.0',
],
include_package_data=True,
package_data={'hvac': ['version']},
extras_require={
'parser': ['pyhcl>=0.2.1,<0.3.0']
}
)
|
...
import sys
from setuptools import setup, find_packages
from pkg_resources import resource_filename
with open("README.md", "r") as fh:
long_description = fh.read()
# depending on your execution context the version file
# may be located in a different place!
...
name='hvac',
version=open(vsn_path, 'r').read(),
description='HashiCorp Vault API client',
long_description=long_description,
long_description_content_type="text/markdown",
author='Ian Unruh',
author_email='[email protected]',
url='https://github.com/ianunruh/hvac',
...
|
87bf34f73db4a8481205b4cdfaaf72d9a393e60c
|
src/main/java/co/phoenixlab/discord/commands/CommandUtil.java
|
src/main/java/co/phoenixlab/discord/commands/CommandUtil.java
|
package co.phoenixlab.discord.commands;
import co.phoenixlab.discord.MessageContext;
import co.phoenixlab.discord.api.entities.Channel;
import co.phoenixlab.discord.api.entities.Message;
import co.phoenixlab.discord.api.entities.User;
public class CommandUtil {
static User findUser(MessageContext context, String username) {
Message message = context.getMessage();
User user;
Channel channel = context.getApiClient().getChannelById(message.getChannelId());
// Attempt to find the given user
// If the user is @mentioned, try that first
if (message.getMentions() != null && message.getMentions().length > 0) {
user = message.getMentions()[0];
} else {
user = context.getApiClient().findUser(username, channel.getParent());
}
return user;
}
}
|
package co.phoenixlab.discord.commands;
import co.phoenixlab.discord.MessageContext;
import co.phoenixlab.discord.api.entities.Channel;
import co.phoenixlab.discord.api.entities.Message;
import co.phoenixlab.discord.api.entities.User;
public class CommandUtil {
static User findUser(MessageContext context, String username) {
Message message = context.getMessage();
User user;
Channel channel = context.getApiClient().getChannelById(message.getChannelId());
// Attempt to find the given user
// If the user is @mentioned, try that first
if (message.getMentions() != null && message.getMentions().length > 0) {
user = message.getMentions()[0];
} else {
user = context.getApiClient().findUser(username, channel.getParent());
}
// Try matching by ID
if (user == null) {
user = context.getApiClient().getUserById(username);
}
return user;
}
}
|
Add ability to reference users by ID
|
Add ability to reference users by ID
|
Java
|
mit
|
vincentzhang96/VahrhedralBot
|
java
|
## Code Before:
package co.phoenixlab.discord.commands;
import co.phoenixlab.discord.MessageContext;
import co.phoenixlab.discord.api.entities.Channel;
import co.phoenixlab.discord.api.entities.Message;
import co.phoenixlab.discord.api.entities.User;
public class CommandUtil {
static User findUser(MessageContext context, String username) {
Message message = context.getMessage();
User user;
Channel channel = context.getApiClient().getChannelById(message.getChannelId());
// Attempt to find the given user
// If the user is @mentioned, try that first
if (message.getMentions() != null && message.getMentions().length > 0) {
user = message.getMentions()[0];
} else {
user = context.getApiClient().findUser(username, channel.getParent());
}
return user;
}
}
## Instruction:
Add ability to reference users by ID
## Code After:
package co.phoenixlab.discord.commands;
import co.phoenixlab.discord.MessageContext;
import co.phoenixlab.discord.api.entities.Channel;
import co.phoenixlab.discord.api.entities.Message;
import co.phoenixlab.discord.api.entities.User;
public class CommandUtil {
static User findUser(MessageContext context, String username) {
Message message = context.getMessage();
User user;
Channel channel = context.getApiClient().getChannelById(message.getChannelId());
// Attempt to find the given user
// If the user is @mentioned, try that first
if (message.getMentions() != null && message.getMentions().length > 0) {
user = message.getMentions()[0];
} else {
user = context.getApiClient().findUser(username, channel.getParent());
}
// Try matching by ID
if (user == null) {
user = context.getApiClient().getUserById(username);
}
return user;
}
}
|
// ... existing code ...
} else {
user = context.getApiClient().findUser(username, channel.getParent());
}
// Try matching by ID
if (user == null) {
user = context.getApiClient().getUserById(username);
}
return user;
}
}
// ... rest of the code ...
|
23f2306617a4e4bceecd20190c328b2b3418abc4
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='datreant',
version='0.5.1',
author='David Dotson',
author_email='[email protected]',
packages=['datreant', 'datreant.tests'],
scripts=[],
license='BSD',
long_description=open('README.rst').read(),
install_requires=['pandas', 'tables', 'h5py', 'scandir']
)
|
from setuptools import setup
setup(name='datreant',
version='0.5.1',
author='David Dotson',
author_email='[email protected]',
packages=['datreant', 'datreant.tests'],
scripts=[],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
'numpy',
'pandas',
'tables',
'h5py',
'scandir',
'PyYAML'
]
)
|
Add PyYAML & numpy dependency
|
Add PyYAML & numpy dependency
I'm adding numpy too because we import it directly.
|
Python
|
bsd-3-clause
|
datreant/datreant,dotsdl/datreant,datreant/datreant.core,datreant/datreant.core,datreant/datreant,datreant/datreant.data
|
python
|
## Code Before:
from setuptools import setup
setup(name='datreant',
version='0.5.1',
author='David Dotson',
author_email='[email protected]',
packages=['datreant', 'datreant.tests'],
scripts=[],
license='BSD',
long_description=open('README.rst').read(),
install_requires=['pandas', 'tables', 'h5py', 'scandir']
)
## Instruction:
Add PyYAML & numpy dependency
I'm adding numpy too because we import it directly.
## Code After:
from setuptools import setup
setup(name='datreant',
version='0.5.1',
author='David Dotson',
author_email='[email protected]',
packages=['datreant', 'datreant.tests'],
scripts=[],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
'numpy',
'pandas',
'tables',
'h5py',
'scandir',
'PyYAML'
]
)
|
# ... existing code ...
setup(name='datreant',
version='0.5.1',
author='David Dotson',
author_email='[email protected]',
packages=['datreant', 'datreant.tests'],
scripts=[],
license='BSD',
long_description=open('README.rst').read(),
install_requires=[
'numpy',
'pandas',
'tables',
'h5py',
'scandir',
'PyYAML'
]
)
# ... rest of the code ...
|
b352c3e1f5e8812d29f2e8a1bca807bea5da8cc4
|
test/test_hx_launcher.py
|
test/test_hx_launcher.py
|
import pytest_twisted
from hendrix.ux import main
from hendrix.options import HendrixOptionParser
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
cls.things_written = whatever
class MockStdOut(object):
@classmethod
def write(cls, whatever):
HendrixOptionParser.print_help(MockFile)
assert MockFile.things_written == whatever
mocker.patch('sys.stdout', new=MockStdOut)
main([])
|
from hendrix.options import HendrixOptionParser
from hendrix.ux import main
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
cls.things_written = whatever
class MockStdOut(object):
@classmethod
def write(cls, whatever):
HendrixOptionParser.print_help(MockFile)
assert MockFile.things_written == whatever
mocker.patch('sys.stdout', new=MockStdOut)
main([])
|
Test for the hx launcher.
|
Test for the hx launcher.
|
Python
|
mit
|
hangarunderground/hendrix,hendrix/hendrix,hangarunderground/hendrix,hendrix/hendrix,jMyles/hendrix,hendrix/hendrix,jMyles/hendrix,hangarunderground/hendrix,hangarunderground/hendrix,jMyles/hendrix
|
python
|
## Code Before:
import pytest_twisted
from hendrix.ux import main
from hendrix.options import HendrixOptionParser
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
cls.things_written = whatever
class MockStdOut(object):
@classmethod
def write(cls, whatever):
HendrixOptionParser.print_help(MockFile)
assert MockFile.things_written == whatever
mocker.patch('sys.stdout', new=MockStdOut)
main([])
## Instruction:
Test for the hx launcher.
## Code After:
from hendrix.options import HendrixOptionParser
from hendrix.ux import main
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
cls.things_written = whatever
class MockStdOut(object):
@classmethod
def write(cls, whatever):
HendrixOptionParser.print_help(MockFile)
assert MockFile.things_written == whatever
mocker.patch('sys.stdout', new=MockStdOut)
main([])
|
...
from hendrix.options import HendrixOptionParser
from hendrix.ux import main
def test_no_arguments_gives_help_text(mocker):
class MockFile(object):
@classmethod
def write(cls, whatever):
...
|
35ffe6bb97a30970d4bc3c265b6337712669ee09
|
githubsetupircnotifications.py
|
githubsetupircnotifications.py
|
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('organization')
parser.add_argument('channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.organization)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
import argparse
import getpass
import sys
import github3
def error(message):
print(message)
sys.exit(1)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('organization')
parser.add_argument('channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
error('Failed to sign into github')
org = github.organization(args.organization)
if org is None:
error('Organization "{}" does not appear to exist'.format(args.org))
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
Add error function to remove duplicate code
|
Add error function to remove duplicate code
|
Python
|
mit
|
kragniz/github-setup-irc-notifications
|
python
|
## Code Before:
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('organization')
parser.add_argument('channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.organization)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
## Instruction:
Add error function to remove duplicate code
## Code After:
import argparse
import getpass
import sys
import github3
def error(message):
print(message)
sys.exit(1)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('organization')
parser.add_argument('channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
error('Failed to sign into github')
org = github.organization(args.organization)
if org is None:
error('Organization "{}" does not appear to exist'.format(args.org))
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
...
import github3
def error(message):
print(message)
sys.exit(1)
def main():
parser = argparse.ArgumentParser()
...
github = github3.login(args.username, password=password)
if github is None:
error('Failed to sign into github')
org = github.organization(args.organization)
if org is None:
error('Organization "{}" does not appear to exist'.format(args.org))
conf = {'nickserv_password': '',
'no_colors': '0',
...
|
9cd3bb79126fa2431ba4ae03811ac30fb77b9b46
|
netcat.py
|
netcat.py
|
import argparse
import socket
import sys
parser = argparse.ArgumentParser(description='Simple netcat in pure python.')
parser.add_argument('-z', '--scan', action='store_true')
parser.add_argument('-w', '--timeout', metavar='SECONDS', type=int)
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
if args.scan:
try:
connection = socket.create_connection((args.host, args.port), args.timeout)
if args.verbose:
print "Connection to {} {} port (tcp) succeeded!".format(args.host, args.port)
sys.exit(0)
except socket.error as msg:
if args.verbose:
print "Connection to {} {} port (tcp) failed. {}".format(args.host, args.port, msg)
sys.exit(1)
else:
print 'Not implemented'
|
import argparse
import socket
import sys
parser = argparse.ArgumentParser(description='Simple netcat in pure python.')
parser.add_argument('-s', '--source', metavar='ADDRESS')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-w', '--wait', metavar='SECONDS', type=int)
parser.add_argument('-z', '--zero', action='store_true')
parser.add_argument('host')
parser.add_argument('port')
args = parser.parse_args()
# Set a souce address for socket connection
source = ('', 0)
if args.source:
source = (args.source, 0) # port 0 specifies that the OS will choose a port
# exit successfully if the connection succeeds
if args.zero:
try:
connection = socket.create_connection((args.host, args.port), args.wait, source)
if args.verbose:
print("Connection to {} {} port (tcp) succeeded!".format(args.host, args.port))
sys.exit(0)
except socket.error as msg:
if args.verbose:
print("Connection to {} {} port (tcp) failed. {}".format(args.host, args.port, msg))
sys.exit(1)
else:
print('Not implemented')
|
Support python 2 and 3
|
Support python 2 and 3
Add source argument.
Update arguments to use long names from GNU netcat.
|
Python
|
unlicense
|
benformosa/Toolbox,benformosa/Toolbox
|
python
|
## Code Before:
import argparse
import socket
import sys
parser = argparse.ArgumentParser(description='Simple netcat in pure python.')
parser.add_argument('-z', '--scan', action='store_true')
parser.add_argument('-w', '--timeout', metavar='SECONDS', type=int)
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('host')
parser.add_argument('port', type=int)
args = parser.parse_args()
if args.scan:
try:
connection = socket.create_connection((args.host, args.port), args.timeout)
if args.verbose:
print "Connection to {} {} port (tcp) succeeded!".format(args.host, args.port)
sys.exit(0)
except socket.error as msg:
if args.verbose:
print "Connection to {} {} port (tcp) failed. {}".format(args.host, args.port, msg)
sys.exit(1)
else:
print 'Not implemented'
## Instruction:
Support python 2 and 3
Add source argument.
Update arguments to use long names from GNU netcat.
## Code After:
import argparse
import socket
import sys
parser = argparse.ArgumentParser(description='Simple netcat in pure python.')
parser.add_argument('-s', '--source', metavar='ADDRESS')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-w', '--wait', metavar='SECONDS', type=int)
parser.add_argument('-z', '--zero', action='store_true')
parser.add_argument('host')
parser.add_argument('port')
args = parser.parse_args()
# Set a souce address for socket connection
source = ('', 0)
if args.source:
source = (args.source, 0) # port 0 specifies that the OS will choose a port
# exit successfully if the connection succeeds
if args.zero:
try:
connection = socket.create_connection((args.host, args.port), args.wait, source)
if args.verbose:
print("Connection to {} {} port (tcp) succeeded!".format(args.host, args.port))
sys.exit(0)
except socket.error as msg:
if args.verbose:
print("Connection to {} {} port (tcp) failed. {}".format(args.host, args.port, msg))
sys.exit(1)
else:
print('Not implemented')
|
# ... existing code ...
import sys
parser = argparse.ArgumentParser(description='Simple netcat in pure python.')
parser.add_argument('-s', '--source', metavar='ADDRESS')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-w', '--wait', metavar='SECONDS', type=int)
parser.add_argument('-z', '--zero', action='store_true')
parser.add_argument('host')
parser.add_argument('port')
args = parser.parse_args()
# Set a souce address for socket connection
source = ('', 0)
if args.source:
source = (args.source, 0) # port 0 specifies that the OS will choose a port
# exit successfully if the connection succeeds
if args.zero:
try:
connection = socket.create_connection((args.host, args.port), args.wait, source)
if args.verbose:
print("Connection to {} {} port (tcp) succeeded!".format(args.host, args.port))
sys.exit(0)
except socket.error as msg:
if args.verbose:
print("Connection to {} {} port (tcp) failed. {}".format(args.host, args.port, msg))
sys.exit(1)
else:
print('Not implemented')
# ... rest of the code ...
|
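The rewritten `-z` path in this row reduces to a single `socket.create_connection` call; a standalone sketch with made-up host, port, and timeout values:

```python
# Port check mirroring the -z branch: create_connection takes the (host, port)
# address, a timeout in seconds, and an optional (address, port) source_address.
import socket
import sys

host, port = 'example.org', 80   # made-up target
source = ('', 0)                 # port 0 lets the OS pick the local port

try:
    conn = socket.create_connection((host, port), 3, source)
    conn.close()
    print("Connection to {} {} port (tcp) succeeded!".format(host, port))
    sys.exit(0)
except socket.error as msg:
    print("Connection to {} {} port (tcp) failed. {}".format(host, port, msg))
    sys.exit(1)
```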
0a5ffd4b837b57412f4943b9d62ce2d109315db3
|
You-QueryEngine/api.h
|
You-QueryEngine/api.h
|
/// \file api.h
/// Defines the API for Query Engine.
/// \author A0112054Y
#pragma once
#ifndef YOU_QUERYENGINE_API_H_
#define YOU_QUERYENGINE_API_H_
#include <memory>
#include <boost/variant.hpp>
#include "task_model.h"
namespace You {
namespace QueryEngine {
/// A synthesized type for holding query responses
typedef boost::variant <
std::vector<Task>,
Task,
Task::ID,
Task::Time,
Task::Dependencies,
Task::Description
> Response;
/// Base class for all queries.
class Query {
friend Response executeQuery(std::unique_ptr<Query> query);
private:
/// Execute the query.
virtual Response execute() = 0;
};
/// \name Query Constructors
/// @{
/// Construct a query for adding a task
/// \note Please use Task::DEFAULT_xxx to replace incomplete fields.
std::unique_ptr<Query>
AddTask(Task::Description description, Task::Time deadline,
Task::Priority priority, Task::Dependencies dependencies);
/// @}
/// Execute a query and return a response
/// \return The result of the query as a response object.
Response executeQuery(std::unique_ptr<Query> query);
} // namespace QueryEngine
} // namespace You
#endif // YOU_QUERYENGINE_API_H_
|
/// \file api.h
/// Defines the API for Query Engine.
/// \author A0112054Y
#pragma once
#ifndef YOU_QUERYENGINE_API_H_
#define YOU_QUERYENGINE_API_H_
#include <memory>
#include <boost/variant.hpp>
#include "task_model.h"
namespace You {
namespace QueryEngine {
/// A synthesized type for holding query responses
typedef boost::variant <
std::vector<Task>,
Task,
Task::ID,
Task::Time,
Task::Dependencies,
Task::Description
> Response;
/// Base class for all queries.
class Query {
friend Response executeQuery(std::unique_ptr<Query> query);
private:
/// Execute the query.
virtual Response execute() = 0;
};
/// \name Query Constructors
/// @{
/// Construct a query for adding a task
/// \note Please use Task::DEFAULT_xxx to replace incomplete fields.
std::unique_ptr<Query>
AddTask(Task::Description description, Task::Time deadline,
Task::Priority priority, Task::Dependencies dependencies);
std::unique_ptr<Query>
FilterTask(const std::function<bool(Task)>& filter);
/// @}
/// Execute a query and return a response
/// \return The result of the query as a response object.
Response executeQuery(std::unique_ptr<Query> query);
} // namespace QueryEngine
} // namespace You
#endif // YOU_QUERYENGINE_API_H_
|
Add filter stub in header
|
Add filter stub in header
|
C
|
mit
|
cs2103aug2014-w10-1c/main,cs2103aug2014-w10-1c/main
|
c
|
## Code Before:
/// \file api.h
/// Defines the API for Query Engine.
/// \author A0112054Y
#pragma once
#ifndef YOU_QUERYENGINE_API_H_
#define YOU_QUERYENGINE_API_H_
#include <memory>
#include <boost/variant.hpp>
#include "task_model.h"
namespace You {
namespace QueryEngine {
/// A synthesized type for holding query responses
typedef boost::variant <
std::vector<Task>,
Task,
Task::ID,
Task::Time,
Task::Dependencies,
Task::Description
> Response;
/// Base class for all queries.
class Query {
friend Response executeQuery(std::unique_ptr<Query> query);
private:
/// Execute the query.
virtual Response execute() = 0;
};
/// \name Query Constructors
/// @{
/// Construct a query for adding a task
/// \note Please use Task::DEFAULT_xxx to replace incomplete fields.
std::unique_ptr<Query>
AddTask(Task::Description description, Task::Time deadline,
Task::Priority priority, Task::Dependencies dependencies);
/// @}
/// Execute a query and return a response
/// \return The result of the query as a response object.
Response executeQuery(std::unique_ptr<Query> query);
} // namespace QueryEngine
} // namespace You
#endif // YOU_QUERYENGINE_API_H_
## Instruction:
Add filter stub in header
## Code After:
/// \file api.h
/// Defines the API for Query Engine.
/// \author A0112054Y
#pragma once
#ifndef YOU_QUERYENGINE_API_H_
#define YOU_QUERYENGINE_API_H_
#include <memory>
#include <boost/variant.hpp>
#include "task_model.h"
namespace You {
namespace QueryEngine {
/// A synthesized type for holding query responses
typedef boost::variant <
std::vector<Task>,
Task,
Task::ID,
Task::Time,
Task::Dependencies,
Task::Description
> Response;
/// Base class for all queries.
class Query {
friend Response executeQuery(std::unique_ptr<Query> query);
private:
/// Execute the query.
virtual Response execute() = 0;
};
/// \name Query Constructors
/// @{
/// Construct a query for adding a task
/// \note Please use Task::DEFAULT_xxx to replace incomplete fields.
std::unique_ptr<Query>
AddTask(Task::Description description, Task::Time deadline,
Task::Priority priority, Task::Dependencies dependencies);
std::unique_ptr<Query>
FilterTask(const std::function<bool(Task)>& filter);
/// @}
/// Execute a query and return a response
/// \return The result of the query as a response object.
Response executeQuery(std::unique_ptr<Query> query);
} // namespace QueryEngine
} // namespace You
#endif // YOU_QUERYENGINE_API_H_
|
...
std::unique_ptr<Query>
AddTask(Task::Description description, Task::Time deadline,
Task::Priority priority, Task::Dependencies dependencies);
std::unique_ptr<Query>
FilterTask(const std::function<bool(Task)>& filter);
/// @}
/// Execute a query and return a response
...
|
ec64c568bece0aa1ddcceb584bfbe976ef20956c
|
mathosphere-core/src/test/java/com/formulasearchengine/mathosphere/mlp/cli/CliParamsTest.java
|
mathosphere-core/src/test/java/com/formulasearchengine/mathosphere/mlp/cli/CliParamsTest.java
|
package com.formulasearchengine.mathosphere.mlp.cli;
import org.junit.Test;
import static org.junit.Assert.*;
public class CliParamsTest {
@Test
public void help() {
String[] args = {"help"};
CliParams params = CliParams.from(args);
assertEquals("help", params.getCommand());
}
@Test
public void useTex() {
String[] args = {"mlp", "--tex"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertTrue(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void notUseTex() {
String[] args = {"mlp"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertFalse(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void count() {
String[] args = {"count", "--formulae", "-in", "c:/tmp/mlp/input/"};
CliParams params = CliParams.from(args);
assertEquals("count", params.getCommand());
CountCommandConfig count = params.getCount();
assertTrue(count.isFormulas());
assertFalse(count.isDefinitions());
assertFalse(count.isIdentifiers());
assertEquals("c:/tmp/mlp/input/", count.getInput());
}
}
|
package com.formulasearchengine.mathosphere.mlp.cli;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class CliParamsTest {
@Test
public void help() {
String[] args = {"help"};
CliParams params = CliParams.from(args);
assertEquals("help", params.getCommand());
}
@Test
public void texvcinfo() {
String[] args = {"tags", "--texvcinfo", "expected"};
CliParams params = CliParams.from(args);
assertEquals("expected", params.getTagsCommandConfig().getTexvcinfoUrl());
}
@Test
public void useTex() {
String[] args = {"mlp", "--tex"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertTrue(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void notUseTex() {
String[] args = {"mlp"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertFalse(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void count() {
String[] args = {"count", "--formulae", "-in", "c:/tmp/mlp/input/"};
CliParams params = CliParams.from(args);
assertEquals("count", params.getCommand());
CountCommandConfig count = params.getCount();
assertTrue(count.isFormulas());
assertFalse(count.isDefinitions());
assertFalse(count.isIdentifiers());
assertEquals("c:/tmp/mlp/input/", count.getInput());
}
}
|
Test setting texvcinfo url from cli
|
Test setting texvcinfo url from cli
|
Java
|
apache-2.0
|
TU-Berlin/mathosphere,TU-Berlin/mathosphere
|
java
|
## Code Before:
package com.formulasearchengine.mathosphere.mlp.cli;
import org.junit.Test;
import static org.junit.Assert.*;
public class CliParamsTest {
@Test
public void help() {
String[] args = {"help"};
CliParams params = CliParams.from(args);
assertEquals("help", params.getCommand());
}
@Test
public void useTex() {
String[] args = {"mlp", "--tex"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertTrue(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void notUseTex() {
String[] args = {"mlp"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertFalse(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void count() {
String[] args = {"count", "--formulae", "-in", "c:/tmp/mlp/input/"};
CliParams params = CliParams.from(args);
assertEquals("count", params.getCommand());
CountCommandConfig count = params.getCount();
assertTrue(count.isFormulas());
assertFalse(count.isDefinitions());
assertFalse(count.isIdentifiers());
assertEquals("c:/tmp/mlp/input/", count.getInput());
}
}
## Instruction:
Test setting texvcinfo url from cli
## Code After:
package com.formulasearchengine.mathosphere.mlp.cli;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class CliParamsTest {
@Test
public void help() {
String[] args = {"help"};
CliParams params = CliParams.from(args);
assertEquals("help", params.getCommand());
}
@Test
public void texvcinfo() {
String[] args = {"tags", "--texvcinfo", "expected"};
CliParams params = CliParams.from(args);
assertEquals("expected", params.getTagsCommandConfig().getTexvcinfoUrl());
}
@Test
public void useTex() {
String[] args = {"mlp", "--tex"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertTrue(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void notUseTex() {
String[] args = {"mlp"};
CliParams params = CliParams.from(args);
assertEquals("mlp", params.getCommand());
assertFalse(params.getMlpCommandConfig().getUseTeXIdentifiers());
}
@Test
public void count() {
String[] args = {"count", "--formulae", "-in", "c:/tmp/mlp/input/"};
CliParams params = CliParams.from(args);
assertEquals("count", params.getCommand());
CountCommandConfig count = params.getCount();
assertTrue(count.isFormulas());
assertFalse(count.isDefinitions());
assertFalse(count.isIdentifiers());
assertEquals("c:/tmp/mlp/input/", count.getInput());
}
}
|
...
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class CliParamsTest {
...
String[] args = {"help"};
CliParams params = CliParams.from(args);
assertEquals("help", params.getCommand());
}
@Test
public void texvcinfo() {
String[] args = {"tags", "--texvcinfo", "expected"};
CliParams params = CliParams.from(args);
assertEquals("expected", params.getTagsCommandConfig().getTexvcinfoUrl());
}
@Test
...
|
9887b962ddc27f7bebe212e169d1a2c442a35239
|
ironic_ui/content/ironic/panel.py
|
ironic_ui/content/ironic/panel.py
|
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
|
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
Use permissions attribute to detect ironic service
|
Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485
|
Python
|
apache-2.0
|
openstack/ironic-ui,openstack/ironic-ui,openstack/ironic-ui,openstack/ironic-ui
|
python
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.api import base
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin',)
def allowed(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return super(Ironic, self).allowed(context)
def nav(self, context):
request = context['request']
if not base.is_service_enabled(request, 'baremetal'):
return False
else:
return True
dashboard.Admin.register(Ironic)
## Instruction:
Use permissions attribute to detect ironic service
Horizon implements a logic to enable/disable panel by permissions
defined in each panel class. This change replaces the current redundant
logic by that built-in feature to simplify how we define requirements
of the Ironic panels.
Change-Id: I4a9dabfea79c23155fb8986fe8386202d7474485
## Code After:
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.admin import dashboard
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
|
// ... existing code ...
import horizon
from openstack_dashboard.dashboards.admin import dashboard
// ... modified code ...
class Ironic(horizon.Panel):
name = _("Ironic Bare Metal Provisioning")
slug = 'ironic'
permissions = ('openstack.roles.admin', 'openstack.services.baremetal',)
dashboard.Admin.register(Ironic)
// ... rest of the code ...
|
1e0998cf1d80b72eafd1639c839c86ac6f38a6ef
|
test/util/SeqUtils.h
|
test/util/SeqUtils.h
|
namespace rc {
namespace test {
//! Forwards Seq a random amount and copies it to see if it is equal to the
//! original.
template<typename T>
void assertEqualCopies(Seq<T> seq)
{
std::size_t len = seq::length(seq);
std::size_t n = *gen::ranged<std::size_t>(0, len * 2);
while (n--)
seq.next();
const auto copy = seq;
RC_ASSERT(copy == seq);
}
} // namespace test
} // namespace rc
|
namespace rc {
namespace test {
//! Forwards Seq a random amount and copies it to see if it is equal to the
//! original. Must not be infinite, of course.
template<typename T>
void assertEqualCopies(Seq<T> seq)
{
std::size_t len = seq::length(seq);
std::size_t n = *gen::ranged<std::size_t>(0, len * 2);
while (n--)
seq.next();
const auto copy = seq;
RC_ASSERT(copy == seq);
}
} // namespace test
} // namespace rc
|
Add clarifying comment for assertEqualCopies
|
Add clarifying comment for assertEqualCopies
|
C
|
bsd-2-clause
|
whoshuu/rapidcheck,tm604/rapidcheck,whoshuu/rapidcheck,emil-e/rapidcheck,tm604/rapidcheck,emil-e/rapidcheck,unapiedra/rapidfuzz,emil-e/rapidcheck,whoshuu/rapidcheck,tm604/rapidcheck,unapiedra/rapidfuzz,unapiedra/rapidfuzz
|
c
|
## Code Before:
namespace rc {
namespace test {
//! Forwards Seq a random amount and copies it to see if it is equal to the
//! original.
template<typename T>
void assertEqualCopies(Seq<T> seq)
{
std::size_t len = seq::length(seq);
std::size_t n = *gen::ranged<std::size_t>(0, len * 2);
while (n--)
seq.next();
const auto copy = seq;
RC_ASSERT(copy == seq);
}
} // namespace test
} // namespace rc
## Instruction:
Add clarifying comment for assertEqualCopies
## Code After:
namespace rc {
namespace test {
//! Forwards Seq a random amount and copies it to see if it is equal to the
//! original. Must not be infinite, of course.
template<typename T>
void assertEqualCopies(Seq<T> seq)
{
std::size_t len = seq::length(seq);
std::size_t n = *gen::ranged<std::size_t>(0, len * 2);
while (n--)
seq.next();
const auto copy = seq;
RC_ASSERT(copy == seq);
}
} // namespace test
} // namespace rc
|
// ... existing code ...
namespace test {
//! Forwards Seq a random amount and copies it to see if it is equal to the
//! original. Must not be infinite, of course.
template<typename T>
void assertEqualCopies(Seq<T> seq)
{
// ... rest of the code ...
|
ea8cbcaf41f01a46390882fbc99e6e14d70a49d1
|
src/mmw/apps/user/models.py
|
src/mmw/apps/user/models.py
|
from django.contrib.auth.models import User
from django.db import models
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
"""
Create an auth token for every newly created user.
"""
if created:
Token.objects.create(user=instance)
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
Create an API auth token for every newly created user
|
Create an API auth token for every newly created user
* Add a post_save signal to add a new authtoken for every new user. For use with
the Geoprocessing API
|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
|
python
|
## Code Before:
from django.contrib.auth.models import User
from django.db import models
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
## Instruction:
Create an API auth token for every newly created user
* Add a post_save signal to add a new authtoken for every new user. For use with
the Geoprocessing API
## Code After:
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
"""
Create an auth token for every newly created user.
"""
if created:
Token.objects.create(user=instance)
class ItsiUserManager(models.Manager):
def create_itsi_user(self, user, itsi_id):
itsi_user = self.create(user=user, itsi_id=itsi_id)
return itsi_user
class ItsiUser(models.Model):
user = models.OneToOneField(User, primary_key=True)
itsi_id = models.IntegerField()
objects = ItsiUserManager()
def __unicode__(self):
return unicode(self.user.username)
|
# ... existing code ...
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
"""
Create an auth token for every newly created user.
"""
if created:
Token.objects.create(user=instance)
class ItsiUserManager(models.Manager):
# ... rest of the code ...
|
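A usage sketch for the receiver above: once the signal is connected and rest_framework's authtoken app is installed, creating a user should leave a matching Token behind. The snippet assumes a configured Django test environment; the username and password are made up.
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
# Creating a user fires post_save, which the receiver turns into a Token.
user = User.objects.create_user(username='alice', password='not-a-real-password')
token = Token.objects.get(user=user)
print(token.key)  # the API auth token created for the new user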
ee2ec81c8667f117fdbd4e3c02dce9170eefd221
|
VYNFCKit/VYNFCNDEFPayloadParser.h
|
VYNFCKit/VYNFCNDEFPayloadParser.h
|
//
// VYNFCNDEFPayloadParser.h
// VYNFCKit
//
// Created by Vince Yuan on 7/8/17.
// Copyright © 2017 Vince Yuan. All rights reserved.
//
// This source code is licensed under the MIT-style license found in the
// LICENSE file in the root directory of this source tree.
//
#import <Foundation/Foundation.h>
@class NFCNDEFPayload, VYNFCNDEFMessageHeader;
@interface VYNFCNDEFPayloadParser : NSObject
+ (nullable id)parse:(nullable NFCNDEFPayload *)payload;
+ (nullable VYNFCNDEFMessageHeader *)parseMessageHeader:(nullable unsigned char*)payloadBytes length:(NSUInteger)length;
@end
|
//
// VYNFCNDEFPayloadParser.h
// VYNFCKit
//
// Created by Vince Yuan on 7/8/17.
// Copyright © 2017 Vince Yuan. All rights reserved.
//
// This source code is licensed under the MIT-style license found in the
// LICENSE file in the root directory of this source tree.
//
#import <Foundation/Foundation.h>
@class NFCNDEFPayload, VYNFCNDEFMessageHeader;
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(watchos, macos, tvos)
@interface VYNFCNDEFPayloadParser : NSObject
+ (nullable id)parse:(nullable NFCNDEFPayload *)payload;
+ (nullable VYNFCNDEFMessageHeader *)parseMessageHeader:(nullable unsigned char*)payloadBytes length:(NSUInteger)length;
@end
|
Remove partial availability warning by adding API_AVAILABLE.
|
Remove partial availability warning by adding API_AVAILABLE.
|
C
|
mit
|
vinceyuan/VYNFCKit,vinceyuan/VYNFCKit
|
c
|
## Code Before:
//
// VYNFCNDEFPayloadParser.h
// VYNFCKit
//
// Created by Vince Yuan on 7/8/17.
// Copyright © 2017 Vince Yuan. All rights reserved.
//
// This source code is licensed under the MIT-style license found in the
// LICENSE file in the root directory of this source tree.
//
#import <Foundation/Foundation.h>
@class NFCNDEFPayload, VYNFCNDEFMessageHeader;
@interface VYNFCNDEFPayloadParser : NSObject
+ (nullable id)parse:(nullable NFCNDEFPayload *)payload;
+ (nullable VYNFCNDEFMessageHeader *)parseMessageHeader:(nullable unsigned char*)payloadBytes length:(NSUInteger)length;
@end
## Instruction:
Remove partial availability warning by adding API_AVAILABLE.
## Code After:
//
// VYNFCNDEFPayloadParser.h
// VYNFCKit
//
// Created by Vince Yuan on 7/8/17.
// Copyright © 2017 Vince Yuan. All rights reserved.
//
// This source code is licensed under the MIT-style license found in the
// LICENSE file in the root directory of this source tree.
//
#import <Foundation/Foundation.h>
@class NFCNDEFPayload, VYNFCNDEFMessageHeader;
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(watchos, macos, tvos)
@interface VYNFCNDEFPayloadParser : NSObject
+ (nullable id)parse:(nullable NFCNDEFPayload *)payload;
+ (nullable VYNFCNDEFMessageHeader *)parseMessageHeader:(nullable unsigned char*)payloadBytes length:(NSUInteger)length;
@end
|
# ... existing code ...
@class NFCNDEFPayload, VYNFCNDEFMessageHeader;
API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(watchos, macos, tvos)
@interface VYNFCNDEFPayloadParser : NSObject
+ (nullable id)parse:(nullable NFCNDEFPayload *)payload;
# ... rest of the code ...
|
e53d324f2ac4874c1f56bbf00dfee47c6b059e5d
|
fluidreview/admin.py
|
fluidreview/admin.py
|
"""Admin interface for fluidreview"""
from django.contrib import admin
from bootcamp.utils import get_field_names
from fluidreview.models import WebhookRequest, OAuthToken
class WebhookRequestAdmin(admin.ModelAdmin):
"""Admin for WebhookRequest"""
model = WebhookRequest
readonly_fields = get_field_names(WebhookRequest)
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
class OAuthTokenAdmin(admin.ModelAdmin):
"""Admin for OAuthToken"""
model = OAuthToken
admin.site.register(WebhookRequest, WebhookRequestAdmin)
admin.site.register(OAuthToken, OAuthTokenAdmin)
|
"""Admin interface for fluidreview"""
from django.contrib import admin
from bootcamp.utils import get_field_names
from fluidreview.models import WebhookRequest, OAuthToken
class WebhookRequestAdmin(admin.ModelAdmin):
"""Admin for WebhookRequest"""
model = WebhookRequest
readonly_fields = get_field_names(WebhookRequest)
ordering = ('-created_on',)
list_filter = ('award_id', 'status')
search_fields = ('user_email', 'user_id', 'submission_id')
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
class OAuthTokenAdmin(admin.ModelAdmin):
"""Admin for OAuthToken"""
model = OAuthToken
admin.site.register(WebhookRequest, WebhookRequestAdmin)
admin.site.register(OAuthToken, OAuthTokenAdmin)
|
Sort webhook requests by date
|
Sort webhook requests by date
|
Python
|
bsd-3-clause
|
mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce
|
python
|
## Code Before:
"""Admin interface for fluidreview"""
from django.contrib import admin
from bootcamp.utils import get_field_names
from fluidreview.models import WebhookRequest, OAuthToken
class WebhookRequestAdmin(admin.ModelAdmin):
"""Admin for WebhookRequest"""
model = WebhookRequest
readonly_fields = get_field_names(WebhookRequest)
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
class OAuthTokenAdmin(admin.ModelAdmin):
"""Admin for OAuthToken"""
model = OAuthToken
admin.site.register(WebhookRequest, WebhookRequestAdmin)
admin.site.register(OAuthToken, OAuthTokenAdmin)
## Instruction:
Sort webhook requests by date
## Code After:
"""Admin interface for fluidreview"""
from django.contrib import admin
from bootcamp.utils import get_field_names
from fluidreview.models import WebhookRequest, OAuthToken
class WebhookRequestAdmin(admin.ModelAdmin):
"""Admin for WebhookRequest"""
model = WebhookRequest
readonly_fields = get_field_names(WebhookRequest)
ordering = ('-created_on',)
list_filter = ('award_id', 'status')
search_fields = ('user_email', 'user_id', 'submission_id')
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
class OAuthTokenAdmin(admin.ModelAdmin):
"""Admin for OAuthToken"""
model = OAuthToken
admin.site.register(WebhookRequest, WebhookRequestAdmin)
admin.site.register(OAuthToken, OAuthTokenAdmin)
|
...
"""Admin for WebhookRequest"""
model = WebhookRequest
readonly_fields = get_field_names(WebhookRequest)
ordering = ('-created_on',)
list_filter = ('award_id', 'status')
search_fields = ('user_email', 'user_id', 'submission_id')
def has_add_permission(self, request):
return False
...
|
e53c66f9ab12fe0c90c447176b083513cd3a4cf5
|
store/urls.py
|
store/urls.py
|
from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
|
from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
|
Move products:review URLConf above product:detail
|
Move products:review URLConf above product:detail
The product:detail URL pattern is greedy and previously shadowed the review
URL, so it was never resolved by the correct view
|
Python
|
bsd-3-clause
|
kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop
|
python
|
## Code Before:
from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
url(r'^review/$', product_review, name='review')
]
## Instruction:
Move products:review URLConf above product:detail
The product:detail URL pattern is greedy and previously shadowed the review
URL, so it was never resolved by the correct view
## Code After:
from django.conf.urls import url
from .views import ProductCatalogue, ProductDetail, product_review
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
|
# ... existing code ...
urlpatterns = [
url(r'^catalogue/$', ProductCatalogue.as_view(), name='catalogue'),
url(r'^review/$', product_review, name='review'),
url(r'^(?P<slug>[\w\-]+)/$', ProductDetail.as_view(), name='detail'),
]
# ... rest of the code ...
|
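The "greedy" remark above can be checked with plain regular expressions: the slug pattern also matches 'review/', so whichever pattern Django sees first wins. A small standalone check:
import re
slug_pattern = r'^(?P<slug>[\w\-]+)/$'
review_pattern = r'^review/$'
# 'review/' satisfies both patterns, so with the detail URL listed first
# a request for /review/ would have been routed to ProductDetail.
print(bool(re.match(slug_pattern, 'review/')))    # True
print(bool(re.match(review_pattern, 'review/')))  # True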
c7de0de0532224c3974430fb489682066f579d4a
|
requirementsascodeexamples/helloworld/src/main/java/helloworld/HelloWorld03_EnterNameExample.java
|
requirementsascodeexamples/helloworld/src/main/java/helloworld/HelloWorld03_EnterNameExample.java
|
package helloworld;
import static org.requirementsascode.UseCaseModelBuilder.newBuilder;
import java.util.function.Consumer;
import org.requirementsascode.UseCaseModel;
import org.requirementsascode.UseCaseModelBuilder;
import org.requirementsascode.UseCaseModelRunner;
public class HelloWorld03_EnterNameExample extends AbstractHelloWorldExample{
private static final Class<EnterText> ENTER_FIRST_NAME = EnterText.class;
public UseCaseModel buildWith(UseCaseModelBuilder modelBuilder) {
UseCaseModel useCaseModel =
modelBuilder.useCase("Get greeted")
.basicFlow()
.step("S1").system(promptUserToEnterFirstName())
.step("S2").user(ENTER_FIRST_NAME).system(greetUserWithFirstName())
.build();
return useCaseModel;
}
private Consumer<UseCaseModelRunner> promptUserToEnterFirstName() {
return r -> System.out.print("Please enter your first name: ");
}
private Consumer<EnterText> greetUserWithFirstName() {
return enterText -> System.out.println("Hello, " + enterText.text + ".");
}
public static void main(String[] args){
HelloWorld03_EnterNameExample example = new HelloWorld03_EnterNameExample();
example.start();
}
private void start() {
UseCaseModelRunner useCaseModelRunner = new UseCaseModelRunner();
UseCaseModel useCaseModel = buildWith(newBuilder());
useCaseModelRunner.run(useCaseModel);
useCaseModelRunner.reactTo(enterText());
}
}
|
package helloworld;
import static org.requirementsascode.UseCaseModelBuilder.newBuilder;
import org.requirementsascode.UseCaseModel;
import org.requirementsascode.UseCaseModelBuilder;
import org.requirementsascode.UseCaseModelRunner;
public class HelloWorld03_EnterNameExample extends AbstractHelloWorldExample{
private static final Class<EnterText> ENTER_FIRST_NAME = EnterText.class;
public UseCaseModel buildWith(UseCaseModelBuilder modelBuilder) {
UseCaseModel useCaseModel =
modelBuilder.useCase("Get greeted")
.basicFlow()
.step("S1").system(this::promptUserToEnterFirstName)
.step("S2").user(ENTER_FIRST_NAME).system(this::greetUserWithFirstName)
.build();
return useCaseModel;
}
private void promptUserToEnterFirstName(UseCaseModelRunner runner) {
System.out.print("Please enter your first name: ");
}
private void greetUserWithFirstName(EnterText enterText) {
System.out.println("Hello, " + enterText.text + ".");
}
public static void main(String[] args){
HelloWorld03_EnterNameExample example = new HelloWorld03_EnterNameExample();
example.start();
}
private void start() {
UseCaseModelRunner useCaseModelRunner = new UseCaseModelRunner();
UseCaseModel useCaseModel = buildWith(newBuilder());
useCaseModelRunner.run(useCaseModel);
useCaseModelRunner.reactTo(enterText());
}
}
|
Refactor to use method references
|
Refactor to use method references
|
Java
|
apache-2.0
|
bertilmuth/requirementsascode
|
java
|
## Code Before:
package helloworld;
import static org.requirementsascode.UseCaseModelBuilder.newBuilder;
import java.util.function.Consumer;
import org.requirementsascode.UseCaseModel;
import org.requirementsascode.UseCaseModelBuilder;
import org.requirementsascode.UseCaseModelRunner;
public class HelloWorld03_EnterNameExample extends AbstractHelloWorldExample{
private static final Class<EnterText> ENTER_FIRST_NAME = EnterText.class;
public UseCaseModel buildWith(UseCaseModelBuilder modelBuilder) {
UseCaseModel useCaseModel =
modelBuilder.useCase("Get greeted")
.basicFlow()
.step("S1").system(promptUserToEnterFirstName())
.step("S2").user(ENTER_FIRST_NAME).system(greetUserWithFirstName())
.build();
return useCaseModel;
}
private Consumer<UseCaseModelRunner> promptUserToEnterFirstName() {
return r -> System.out.print("Please enter your first name: ");
}
private Consumer<EnterText> greetUserWithFirstName() {
return enterText -> System.out.println("Hello, " + enterText.text + ".");
}
public static void main(String[] args){
HelloWorld03_EnterNameExample example = new HelloWorld03_EnterNameExample();
example.start();
}
private void start() {
UseCaseModelRunner useCaseModelRunner = new UseCaseModelRunner();
UseCaseModel useCaseModel = buildWith(newBuilder());
useCaseModelRunner.run(useCaseModel);
useCaseModelRunner.reactTo(enterText());
}
}
## Instruction:
Refactor to use method references
## Code After:
package helloworld;
import static org.requirementsascode.UseCaseModelBuilder.newBuilder;
import org.requirementsascode.UseCaseModel;
import org.requirementsascode.UseCaseModelBuilder;
import org.requirementsascode.UseCaseModelRunner;
public class HelloWorld03_EnterNameExample extends AbstractHelloWorldExample{
private static final Class<EnterText> ENTER_FIRST_NAME = EnterText.class;
public UseCaseModel buildWith(UseCaseModelBuilder modelBuilder) {
UseCaseModel useCaseModel =
modelBuilder.useCase("Get greeted")
.basicFlow()
.step("S1").system(this::promptUserToEnterFirstName)
.step("S2").user(ENTER_FIRST_NAME).system(this::greetUserWithFirstName)
.build();
return useCaseModel;
}
private void promptUserToEnterFirstName(UseCaseModelRunner runner) {
System.out.print("Please enter your first name: ");
}
private void greetUserWithFirstName(EnterText enterText) {
System.out.println("Hello, " + enterText.text + ".");
}
public static void main(String[] args){
HelloWorld03_EnterNameExample example = new HelloWorld03_EnterNameExample();
example.start();
}
private void start() {
UseCaseModelRunner useCaseModelRunner = new UseCaseModelRunner();
UseCaseModel useCaseModel = buildWith(newBuilder());
useCaseModelRunner.run(useCaseModel);
useCaseModelRunner.reactTo(enterText());
}
}
|
// ... existing code ...
package helloworld;
import static org.requirementsascode.UseCaseModelBuilder.newBuilder;
import org.requirementsascode.UseCaseModel;
import org.requirementsascode.UseCaseModelBuilder;
// ... modified code ...
UseCaseModel useCaseModel =
modelBuilder.useCase("Get greeted")
.basicFlow()
.step("S1").system(this::promptUserToEnterFirstName)
.step("S2").user(ENTER_FIRST_NAME).system(this::greetUserWithFirstName)
.build();
return useCaseModel;
}
private void promptUserToEnterFirstName(UseCaseModelRunner runner) {
System.out.print("Please enter your first name: ");
}
private void greetUserWithFirstName(EnterText enterText) {
System.out.println("Hello, " + enterText.text + ".");
}
public static void main(String[] args){
// ... rest of the code ...
|
c721ba7badc0b980d9c58822b5c0b626b1321f1a
|
grokapi/cli.py
|
grokapi/cli.py
|
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=False,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
required=False,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
required=False,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
Fix default values of Argument Parser
|
Fix default values of Argument Parser
|
Python
|
mit
|
Commonists/Grokapi
|
python
|
## Code Before:
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
## Instruction:
Fix default values of Argument Parser
## Code After:
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=False,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
required=False,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
required=False,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
// ... existing code ...
type=str,
dest="lang",
default="en",
required=False,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
required=False,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
required=False,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
// ... rest of the code ...
|
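Why the defaults mattered in the change above: with required=True the defaults were never consulted, but once the options become optional, a string default such as "en" on an int-typed option is parsed with that type and rejected. A minimal reproduction (behaviour as I understand argparse on Python 3; the option name is illustrative):
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-y', '--year', type=int, default='en', required=False)
try:
    parser.parse_args([])  # option omitted, so argparse tries int('en') on the default
except SystemExit:
    print("argparse rejected the string default for an int option")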
21a4c6c5cdf3461ef2bd6048a7399044e8b1a0e8
|
spyder_unittest/backend/pytestworker.py
|
spyder_unittest/backend/pytestworker.py
|
# Standard library imports
import sys
# Third party imports
import pytest
pytest.main(sys.argv[1:])
|
# Standard library imports
import sys
# Third party imports
import pytest
class SpyderPlugin():
"""Pytest plugin which reports in format suitable for Spyder."""
def pytest_itemcollected(self, item):
"""Called by py.test when a test item is collected."""
name = item.name
module = item.parent.name
module = module.replace('/', '.') # convert path to dotted path
if module.endswith('.py'):
module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format(name, module))
pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
|
Add py.test plugin which prints out test names as they are collected
|
Add py.test plugin which prints out test names as they are collected
|
Python
|
mit
|
jitseniesen/spyder-unittest
|
python
|
## Code Before:
# Standard library imports
import sys
# Third party imports
import pytest
pytest.main(sys.argv[1:])
## Instruction:
Add py.test plugin which prints out test names as they are collected
## Code After:
# Standard library imports
import sys
# Third party imports
import pytest
class SpyderPlugin():
"""Pytest plugin which reports in format suitable for Spyder."""
def pytest_itemcollected(self, item):
"""Called by py.test when a test item is collected."""
name = item.name
module = item.parent.name
module = module.replace('/', '.') # convert path to dotted path
if module.endswith('.py'):
module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format(name, module))
pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
|
// ... existing code ...
# Third party imports
import pytest
class SpyderPlugin():
"""Pytest plugin which reports in format suitable for Spyder."""
def pytest_itemcollected(self, item):
"""Called by py.test when a test item is collected."""
name = item.name
module = item.parent.name
module = module.replace('/', '.') # convert path to dotted path
if module.endswith('.py'):
module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format(name, module))
pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
// ... rest of the code ...
|
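A standalone illustration of the path-to-module conversion used in pytest_itemcollected above; the file path and test name are made up for the example.
module = 'tests/util/test_seq.py'
module = module.replace('/', '.')  # convert path to dotted path
if module.endswith('.py'):
    module = module[:-3]
print('pytest_item_collected(name={}, module={})'.format('test_length', module))
# -> pytest_item_collected(name=test_length, module=tests.util.test_seq)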
c2598058722531662aab8831640fc367689d2a43
|
tests/utils/test_process_word_vectors.py
|
tests/utils/test_process_word_vectors.py
|
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://s3-us-west-1.amazonaws.com/fasttext-vectors/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
|
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
|
Update Fasttext pretrained vectors location
|
Update Fasttext pretrained vectors location
|
Python
|
mit
|
lvapeab/nmt-keras,lvapeab/nmt-keras
|
python
|
## Code Before:
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://s3-us-west-1.amazonaws.com/fasttext-vectors/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
## Instruction:
Update Fasttext pretrained vectors location
## Code After:
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
|
# ... existing code ...
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
# ... rest of the code ...
|
067b557258a85945635a880ced65454cfa2b61af
|
supermega/tests/test_session.py
|
supermega/tests/test_session.py
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
Add test for key derivation
|
Add test for key derivation
|
Python
|
bsd-3-clause
|
lmb/Supermega
|
python
|
## Code Before:
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
## Instruction:
Add test for key derivation
## Code After:
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
# ... existing code ...
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
# ... rest of the code ...
|
7635dd48e94cb1a128b95a5237dc289f1f65964c
|
django_digest/test/__init__.py
|
django_digest/test/__init__.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import django.test
from django_digest.test.methods.basic import BasicAuth
from django_digest.test.methods.detect import DetectAuth
from django_digest.test.methods.digest import DigestAuth
class Client(django.test.Client):
AUTH_METHODS = {'Basic': BasicAuth,
'Digest': DigestAuth}
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self.clear_authorization()
def request(self, **request):
if self.auth_method:
request.update(self.auth_method(request))
response = super(Client, self).request(**request)
if response.status_code == 401 and self.auth_method:
# Try to authenticate
request.update(self.auth_method(request, response))
response = super(Client, self).request(**request)
return response
def set_authorization(self, username, password, method=None):
self.username = username
self.password = password
if method is None:
self.auth_method = DetectAuth(client=self,
username=username,
password=password)
else:
self.auth_method = self.AUTH_METHODS[method](username=username,
password=password)
def clear_authorization(self):
self.username = None
self.password = None
self.auth_method = None
|
from __future__ import absolute_import
from __future__ import unicode_literals
import django.test
from django_digest.test.methods.basic import BasicAuth
from django_digest.test.methods.detect import DetectAuth
from django_digest.test.methods.digest import DigestAuth
class Client(django.test.Client):
AUTH_METHODS = {'Basic': BasicAuth,
'Digest': DigestAuth}
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self.clear_authorization()
def request(self, **request):
if self.auth_method:
request.update(self.auth_method(request))
# This payload object can only be read once. Since digest auth involves
# two requests, refresh it for the second "request"
payload = request['wsgi.input'].read()
request['wsgi.input'] = django.test.client.FakePayload(payload)
response = super(Client, self).request(**request)
if response.status_code == 401 and self.auth_method:
# Try to authenticate
request.update(self.auth_method(request, response))
request['wsgi.input'] = django.test.client.FakePayload(payload)
response = super(Client, self).request(**request)
return response
def set_authorization(self, username, password, method=None):
self.username = username
self.password = password
if method is None:
self.auth_method = DetectAuth(client=self,
username=username,
password=password)
else:
self.auth_method = self.AUTH_METHODS[method](username=username,
password=password)
def clear_authorization(self):
self.username = None
self.password = None
self.auth_method = None
|
Reset input for second "request"
|
Reset input for second "request"
|
Python
|
bsd-3-clause
|
dimagi/django-digest
|
python
|
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
import django.test
from django_digest.test.methods.basic import BasicAuth
from django_digest.test.methods.detect import DetectAuth
from django_digest.test.methods.digest import DigestAuth
class Client(django.test.Client):
AUTH_METHODS = {'Basic': BasicAuth,
'Digest': DigestAuth}
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self.clear_authorization()
def request(self, **request):
if self.auth_method:
request.update(self.auth_method(request))
response = super(Client, self).request(**request)
if response.status_code == 401 and self.auth_method:
# Try to authenticate
request.update(self.auth_method(request, response))
response = super(Client, self).request(**request)
return response
def set_authorization(self, username, password, method=None):
self.username = username
self.password = password
if method is None:
self.auth_method = DetectAuth(client=self,
username=username,
password=password)
else:
self.auth_method = self.AUTH_METHODS[method](username=username,
password=password)
def clear_authorization(self):
self.username = None
self.password = None
self.auth_method = None
## Instruction:
Reset input for second "request"
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
import django.test
from django_digest.test.methods.basic import BasicAuth
from django_digest.test.methods.detect import DetectAuth
from django_digest.test.methods.digest import DigestAuth
class Client(django.test.Client):
AUTH_METHODS = {'Basic': BasicAuth,
'Digest': DigestAuth}
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self.clear_authorization()
def request(self, **request):
if self.auth_method:
request.update(self.auth_method(request))
# This payload object can only be read once. Since digest auth involves
# two requests, refresh it for the second "request"
payload = request['wsgi.input'].read()
request['wsgi.input'] = django.test.client.FakePayload(payload)
response = super(Client, self).request(**request)
if response.status_code == 401 and self.auth_method:
# Try to authenticate
request.update(self.auth_method(request, response))
request['wsgi.input'] = django.test.client.FakePayload(payload)
response = super(Client, self).request(**request)
return response
def set_authorization(self, username, password, method=None):
self.username = username
self.password = password
if method is None:
self.auth_method = DetectAuth(client=self,
username=username,
password=password)
else:
self.auth_method = self.AUTH_METHODS[method](username=username,
password=password)
def clear_authorization(self):
self.username = None
self.password = None
self.auth_method = None
|
...
from django_digest.test.methods.basic import BasicAuth
from django_digest.test.methods.detect import DetectAuth
from django_digest.test.methods.digest import DigestAuth
class Client(django.test.Client):
AUTH_METHODS = {'Basic': BasicAuth,
...
def request(self, **request):
if self.auth_method:
request.update(self.auth_method(request))
# This payload object can only be read once. Since digest auth involves
# two requests, refresh it for the second "request"
payload = request['wsgi.input'].read()
request['wsgi.input'] = django.test.client.FakePayload(payload)
response = super(Client, self).request(**request)
if response.status_code == 401 and self.auth_method:
# Try to authenticate
request.update(self.auth_method(request, response))
request['wsgi.input'] = django.test.client.FakePayload(payload)
response = super(Client, self).request(**request)
return response
...
|
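The "can only be read once" comment above is ordinary one-shot stream behaviour; FakePayload is assumed to act like any file-like body here. A plain io.BytesIO shows why the saved bytes must be re-wrapped before the authenticated retry:
import io
payload = io.BytesIO(b'field=value')
print(payload.read())  # b'field=value' -- the first request consumes the body
print(payload.read())  # b'' -- nothing left for the retried request
# hence the client wraps the saved bytes in a fresh FakePayload before resending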
7e1ec1b27d69882005ac5492809c8847c21e2198
|
baro.py
|
baro.py
|
from datetime import datetime
class Baro:
"""This class represents a Baro item and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro offer
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_eta_string(self):
"""Returns a string containing the Baro's ETA
"""
seconds = int((self.end - datetime.now()).total_seconds())
return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
(seconds % 3600) // 60)
def get_start_string(self):
"""Returns a string containing the Baro's start
"""
seconds = int((self.start - datetime.now()).total_seconds())
return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
(seconds % 3600) // 60)
|
from datetime import datetime
import utils
class Baro:
"""This class contains info about the Void Trader and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro's offers
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_end_string(self):
"""Returns a string containing Baro's departure time
"""
return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
"""Returns a string containing Baro's arrival time
"""
return timedelta_to_string(self.start - datetime.now())
|
Change class Baro to use timedelta_to_string, some fixes
|
Change class Baro to use timedelta_to_string, some fixes
|
Python
|
mit
|
pabletos/Hubot-Warframe,pabletos/Hubot-Warframe
|
python
|
## Code Before:
from datetime import datetime
class Baro:
"""This class represents a Baro item and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro offer
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_eta_string(self):
"""Returns a string containing the Baro's ETA
"""
seconds = int((self.end - datetime.now()).total_seconds())
return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
(seconds % 3600) // 60)
def get_start_string(self):
"""Returns a string containing the Baro's start
"""
seconds = int((self.start - datetime.now()).total_seconds())
return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
(seconds % 3600) // 60)
## Instruction:
Change class Baro to use timedelta_to_string, some fixes
## Code After:
from datetime import datetime
import utils
class Baro:
"""This class contains info about the Void Trader and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro's offers
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_end_string(self):
"""Returns a string containing Baro's departure time
"""
return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
"""Returns a string containing Baro's arrival time
"""
return timedelta_to_string(self.start - datetime.now())
|
...
from datetime import datetime
import utils
class Baro:
"""This class contains info about the Void Trader and is initialized with
data in JSON format
"""
...
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro's offers
"""
baroItemString = ""
...
return baroItemString
def get_end_string(self):
"""Returns a string containing Baro's departure time
"""
return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
"""Returns a string containing Baro's arrival time
"""
return timedelta_to_string(self.start - datetime.now())
...
|
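The new Baro code calls timedelta_to_string, which is not defined in the file shown; presumably it lives in the imported utils module. A hypothetical helper reconstructed from the old inline formatting might look like this:
from datetime import timedelta
def timedelta_to_string(delta):
    # Format a timedelta the way the old get_eta_string did.
    seconds = int(delta.total_seconds())
    return '{} days, {} hrs, {} mins'.format(
        seconds // 86400, (seconds % 86400) // 3600, (seconds % 3600) // 60)
print(timedelta_to_string(timedelta(days=2, hours=3, minutes=4)))  # 2 days, 3 hrs, 4 mins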
2536840398104b0270ba1156b1ff63cb4ae2a2ef
|
AsynchFilePlugin/AsynchFilePlugin.h
|
AsynchFilePlugin/AsynchFilePlugin.h
|
/* Header file for AsynchFile plugin */
/* module initialization/shutdown */
int asyncFileInit(void);
int asyncFileShutdown(void);
/*** Experimental Asynchronous File I/O ***/
typedef struct {
int sessionID;
void *state;
} AsyncFile;
int asyncFileClose(AsyncFile *f);
int asyncFileOpen(AsyncFile *f, long fileNamePtr, int fileNameSize, int writeFlag, int semaIndex);
int asyncFileRecordSize();
int asyncFileReadResult(AsyncFile *f, long bufferPtr, int bufferSize);
int asyncFileReadStart(AsyncFile *f, int fPosition, int count);
int asyncFileWriteResult(AsyncFile *f);
int asyncFileWriteStart(AsyncFile *f, int fPosition, long bufferPtr, int bufferSize);
|
/* Header file for AsynchFile plugin */
/* module initialization/shutdown */
int asyncFileInit(void);
int asyncFileShutdown(void);
/*** Experimental Asynchronous File I/O ***/
typedef struct {
int sessionID;
void *state;
} AsyncFile;
int asyncFileClose(AsyncFile *f);
int asyncFileOpen(AsyncFile *f, char *fileNamePtr, int fileNameSize, int writeFlag, int semaIndex);
int asyncFileRecordSize();
int asyncFileReadResult(AsyncFile *f, void *bufferPtr, int bufferSize);
int asyncFileReadStart(AsyncFile *f, int fPosition, int count);
int asyncFileWriteResult(AsyncFile *f);
int asyncFileWriteStart(AsyncFile *f, int fPosition, void *bufferPtr, int bufferSize);
|
Update platforms/* with Eliot's fixes for 64-bit clean AsyncPlugin.
|
Update platforms/* with Eliot's fixes for 64-bit clean AsyncPlugin.
The platforms/unix changes are tested. The win32 and Mac OS changes are not tested but I believe them to be correct.
Not applicable to RiscOS.
Updates not applied to iOS.
@eliot - I changed the declarations in Cross to (void *) rather than long to reduce type casts in platforms/[unix|win32|Mac OS].
@esteban - I did not update platforms/iOS in SVN trunk. Please update declarations as needed.
@tim - no change required for RiscOS.
git-svn-id: http://squeakvm.org/svn/squeak/trunk@3178 fa1542d4-bde8-0310-ad64-8ed1123d492a
Former-commit-id: 55982481b07c41df485b88dff5a63920f92525d0
|
C
|
mit
|
bencoman/pharo-vm,peteruhnak/pharo-vm,peteruhnak/pharo-vm,OpenSmalltalk/vm,timfel/squeakvm,OpenSmalltalk/vm,bencoman/pharo-vm,bencoman/pharo-vm,bencoman/pharo-vm,timfel/squeakvm,timfel/squeakvm,bencoman/pharo-vm,bencoman/pharo-vm,peteruhnak/pharo-vm,timfel/squeakvm,peteruhnak/pharo-vm,OpenSmalltalk/vm,bencoman/pharo-vm,OpenSmalltalk/vm,timfel/squeakvm,peteruhnak/pharo-vm,timfel/squeakvm,bencoman/pharo-vm,peteruhnak/pharo-vm,bencoman/pharo-vm,OpenSmalltalk/vm,peteruhnak/pharo-vm,peteruhnak/pharo-vm,timfel/squeakvm,timfel/squeakvm,OpenSmalltalk/vm,OpenSmalltalk/vm,OpenSmalltalk/vm
|
c
|
## Code Before:
/* Header file for AsynchFile plugin */
/* module initialization/shutdown */
int asyncFileInit(void);
int asyncFileShutdown(void);
/*** Experimental Asynchronous File I/O ***/
typedef struct {
int sessionID;
void *state;
} AsyncFile;
int asyncFileClose(AsyncFile *f);
int asyncFileOpen(AsyncFile *f, long fileNamePtr, int fileNameSize, int writeFlag, int semaIndex);
int asyncFileRecordSize();
int asyncFileReadResult(AsyncFile *f, long bufferPtr, int bufferSize);
int asyncFileReadStart(AsyncFile *f, int fPosition, int count);
int asyncFileWriteResult(AsyncFile *f);
int asyncFileWriteStart(AsyncFile *f, int fPosition, long bufferPtr, int bufferSize);
## Instruction:
Update platforms/* with Eliot's fixes for 64-bit clean AsyncPlugin.
The platforms/unix changes are tested. The win32 and Mac OS changes are not tested but I believe them to be correct.
Not applicable to RiscOS.
Updates not applied to iOS.
@eliot - I changed the declarations in Cross to (void *) rather than long to reduce type casts in platforms/[unix|win32|Mac OS].
@esteban - I did not update platforms/iOS in SVN trunk. Please update declarations as needed.
@tim - no change required for RiscOS.
git-svn-id: http://squeakvm.org/svn/squeak/trunk@3178 fa1542d4-bde8-0310-ad64-8ed1123d492a
Former-commit-id: 55982481b07c41df485b88dff5a63920f92525d0
## Code After:
/* Header file for AsynchFile plugin */
/* module initialization/shutdown */
int asyncFileInit(void);
int asyncFileShutdown(void);
/*** Experimental Asynchronous File I/O ***/
typedef struct {
int sessionID;
void *state;
} AsyncFile;
int asyncFileClose(AsyncFile *f);
int asyncFileOpen(AsyncFile *f, char *fileNamePtr, int fileNameSize, int writeFlag, int semaIndex);
int asyncFileRecordSize();
int asyncFileReadResult(AsyncFile *f, void *bufferPtr, int bufferSize);
int asyncFileReadStart(AsyncFile *f, int fPosition, int count);
int asyncFileWriteResult(AsyncFile *f);
int asyncFileWriteStart(AsyncFile *f, int fPosition, void *bufferPtr, int bufferSize);
|
# ... existing code ...
} AsyncFile;
int asyncFileClose(AsyncFile *f);
int asyncFileOpen(AsyncFile *f, char *fileNamePtr, int fileNameSize, int writeFlag, int semaIndex);
int asyncFileRecordSize();
int asyncFileReadResult(AsyncFile *f, void *bufferPtr, int bufferSize);
int asyncFileReadStart(AsyncFile *f, int fPosition, int count);
int asyncFileWriteResult(AsyncFile *f);
int asyncFileWriteStart(AsyncFile *f, int fPosition, void *bufferPtr, int bufferSize);
# ... rest of the code ...
|
5307e9d879a5432db5f54fd61ea0060b6526a1a6
|
sundaytasks/example/test_plugin.py
|
sundaytasks/example/test_plugin.py
|
from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
|
from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
|
Clear old method of calling plugins
|
Clear old method of calling plugins
|
Python
|
apache-2.0
|
olafura/sundaytasks-py
|
python
|
## Code Before:
from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
## Instruction:
Clear old method of calling plugins
## Code After:
from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
|
# ... existing code ...
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
# ... rest of the code ...
|
01aa219b0058cbfbdb96e890f510fa275f3ef790
|
python/Completion.py
|
python/Completion.py
|
import vim, syncrequest, types
class Completion:
def get_completions(self, column, partialWord):
parameters = {}
parameters['column'] = vim.eval(column)
parameters['wordToComplete'] = vim.eval(partialWord)
parameters['WantDocumentationForEveryCompletionResult'] = \
bool(int(vim.eval('g:omnicomplete_fetch_full_documentation')))
want_snippet = \
bool(int(vim.eval('g:OmniSharp_want_snippet')))
parameters['WantSnippet'] = want_snippet
parameters['WantMethodHeader'] = want_snippet
parameters['WantReturnType'] = want_snippet
parameters['buffer'] = '\r\n'.join(vim.eval('s:textBuffer')[:])
response = syncrequest.get_response('/autocomplete', parameters)
enc = vim.eval('&encoding')
vim_completions = []
if response is not None:
for completion in response:
complete = {
'snip': completion['Snippet'] if completion['Snippet'] is not None else '',
'word': completion['MethodHeader'] if completion['MethodHeader'] is not None else completion['CompletionText'],
'menu': completion['ReturnType'] if completion['ReturnType'] is not None else completion['DisplayText'],
'info': completion['Description'].replace('\r\n', '\n') if completion['Description'] is not None else '',
'icase': 1,
'dup':1
}
vim_completions.append(complete)
return vim_completions
|
import vim, syncrequest, types
class Completion:
def get_completions(self, column, partialWord):
parameters = {}
parameters['column'] = vim.eval(column)
parameters['wordToComplete'] = vim.eval(partialWord)
parameters['WantDocumentationForEveryCompletionResult'] = \
bool(int(vim.eval('g:omnicomplete_fetch_full_documentation')))
want_snippet = \
bool(int(vim.eval('g:OmniSharp_want_snippet')))
parameters['WantSnippet'] = want_snippet
parameters['WantMethodHeader'] = want_snippet
parameters['WantReturnType'] = want_snippet
parameters['buffer'] = '\r\n'.join(vim.eval('s:textBuffer')[:])
response = syncrequest.get_response('/autocomplete', parameters)
enc = vim.eval('&encoding')
vim_completions = []
if response is not None:
for completion in response:
complete = {
'snip': completion['Snippet'] or '',
'word': completion['MethodHeader'] or completion['CompletionText'],
'menu': completion['ReturnType'] or completion['DisplayText'],
'info': completion['Description'].replace('\r\n', '\n') or '',
'icase': 1,
'dup':1
}
vim_completions.append(complete)
return vim_completions
|
Use or operator in python to tidy up completion building.
|
Use or operator in python to tidy up completion building.
|
Python
|
mit
|
OmniSharp/omnisharp-vim,OmniSharp/omnisharp-vim,OmniSharp/omnisharp-vim
|
python
|
## Code Before:
import vim, syncrequest, types
class Completion:
def get_completions(self, column, partialWord):
parameters = {}
parameters['column'] = vim.eval(column)
parameters['wordToComplete'] = vim.eval(partialWord)
parameters['WantDocumentationForEveryCompletionResult'] = \
bool(int(vim.eval('g:omnicomplete_fetch_full_documentation')))
want_snippet = \
bool(int(vim.eval('g:OmniSharp_want_snippet')))
parameters['WantSnippet'] = want_snippet
parameters['WantMethodHeader'] = want_snippet
parameters['WantReturnType'] = want_snippet
parameters['buffer'] = '\r\n'.join(vim.eval('s:textBuffer')[:])
response = syncrequest.get_response('/autocomplete', parameters)
enc = vim.eval('&encoding')
vim_completions = []
if response is not None:
for completion in response:
complete = {
'snip': completion['Snippet'] if completion['Snippet'] is not None else '',
'word': completion['MethodHeader'] if completion['MethodHeader'] is not None else completion['CompletionText'],
'menu': completion['ReturnType'] if completion['ReturnType'] is not None else completion['DisplayText'],
'info': completion['Description'].replace('\r\n', '\n') if completion['Description'] is not None else '',
'icase': 1,
'dup':1
}
vim_completions.append(complete)
return vim_completions
## Instruction:
Use or operator in python to tidy up completion building.
## Code After:
import vim, syncrequest, types
class Completion:
def get_completions(self, column, partialWord):
parameters = {}
parameters['column'] = vim.eval(column)
parameters['wordToComplete'] = vim.eval(partialWord)
parameters['WantDocumentationForEveryCompletionResult'] = \
bool(int(vim.eval('g:omnicomplete_fetch_full_documentation')))
want_snippet = \
bool(int(vim.eval('g:OmniSharp_want_snippet')))
parameters['WantSnippet'] = want_snippet
parameters['WantMethodHeader'] = want_snippet
parameters['WantReturnType'] = want_snippet
parameters['buffer'] = '\r\n'.join(vim.eval('s:textBuffer')[:])
response = syncrequest.get_response('/autocomplete', parameters)
enc = vim.eval('&encoding')
vim_completions = []
if response is not None:
for completion in response:
complete = {
'snip': completion['Snippet'] or '',
'word': completion['MethodHeader'] or completion['CompletionText'],
'menu': completion['ReturnType'] or completion['DisplayText'],
'info': completion['Description'].replace('\r\n', '\n') or '',
'icase': 1,
'dup':1
}
vim_completions.append(complete)
return vim_completions
|
...
if response is not None:
for completion in response:
complete = {
'snip': completion['Snippet'] or '',
'word': completion['MethodHeader'] or completion['CompletionText'],
'menu': completion['ReturnType'] or completion['DisplayText'],
'info': completion['Description'].replace('\r\n', '\n') or '',
'icase': 1,
'dup':1
}
...
|
92438a5450bc644f066a941efe16ec07cf3c129a
|
httoop/codecs/codec.py
|
httoop/codecs/codec.py
|
from httoop.util import Unicode
class Codec(object):
@classmethod
def decode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, bytes):
data = data.decode(charset) if charset is not None else data.decode()
@classmethod
def encode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, Unicode):
data = data.encode(charset) if charset is not None else data.encode()
return data
@classmethod
def iterencode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.encode(data, charset, mimetype)
@classmethod
def iterdecode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.decode(data, charset, mimetype)
|
from httoop.util import Unicode
class Codec(object):
@classmethod
def decode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, bytes):
data = data.decode(charset or 'ascii')
return data
@classmethod
def encode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, Unicode):
data = data.encode(charset or 'ascii')
return data
@classmethod
def iterencode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.encode(data, charset, mimetype)
@classmethod
def iterdecode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.decode(data, charset, mimetype)
|
Make encoding and decoding strict
|
Make encoding and decoding strict
* programmers must know what kind of data they use
* don't guess encodings anymore
|
Python
|
mit
|
spaceone/httoop,spaceone/httoop,spaceone/httoop
|
python
|
## Code Before:
from httoop.util import Unicode
class Codec(object):
@classmethod
def decode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, bytes):
data = data.decode(charset) if charset is not None else data.decode()
@classmethod
def encode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, Unicode):
data = data.encode(charset) if charset is not None else data.encode()
return data
@classmethod
def iterencode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.encode(data, charset, mimetype)
@classmethod
def iterdecode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.decode(data, charset, mimetype)
## Instruction:
Make encoding and decoding strict
* programmers must know what kind of data they use
* don't guess encodings anymore
## Code After:
from httoop.util import Unicode
class Codec(object):
@classmethod
def decode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, bytes):
data = data.decode(charset or 'ascii')
return data
@classmethod
def encode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, Unicode):
data = data.encode(charset or 'ascii')
return data
@classmethod
def iterencode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.encode(data, charset, mimetype)
@classmethod
def iterdecode(cls, data, charset=None, mimetype=None): # pragma: no cover
return cls.decode(data, charset, mimetype)
|
# ... existing code ...
@classmethod
def decode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, bytes):
data = data.decode(charset or 'ascii')
return data
@classmethod
def encode(cls, data, charset=None, mimetype=None): # pragma: no cover
if isinstance(data, Unicode):
data = data.encode(charset or 'ascii')
return data
@classmethod
# ... rest of the code ...
|
881d41b2fc465d018a1247419b6a2487c71b88b4
|
pft/tests/test_basics.py
|
pft/tests/test_basics.py
|
"""Basic Unit Tests."""
import unittest
from flask import current_app
from .. import create_app
from ..database import db
class BasicsTestCase(unittest.TestCase):
"""Basic Test Case."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_app_exists(self):
"""Test app exists."""
self.assertFalse(current_app is None)
def test_app_is_testing(self):
"""Test app is testing."""
self.assertTrue(current_app.config['TESTING'])
|
"""Basic Unit Tests."""
import pytest
from flask import current_app
from .. import create_app
from ..database import db
@pytest.fixture(autouse=True)
def initialise_testing_db():
"""Create database before testing, delete after."""
app = create_app('testing')
app_context = app.app_context()
app_context.push()
db.create_all()
yield
db.session.remove()
db.drop_all()
app_context.pop()
def test_app_exists():
"""Test app exists."""
assert current_app is not None
def test_app_is_testing():
"""Test app is testing."""
assert current_app.config['TESTING']
|
Convert basic tests to pytest
|
Convert basic tests to pytest
|
Python
|
unknown
|
gregcowell/BAM,gregcowell/PFT,gregcowell/PFT,gregcowell/BAM
|
python
|
## Code Before:
"""Basic Unit Tests."""
import unittest
from flask import current_app
from .. import create_app
from ..database import db
class BasicsTestCase(unittest.TestCase):
"""Basic Test Case."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_app_exists(self):
"""Test app exists."""
self.assertFalse(current_app is None)
def test_app_is_testing(self):
"""Test app is testing."""
self.assertTrue(current_app.config['TESTING'])
## Instruction:
Convert basic tests to pytest
## Code After:
"""Basic Unit Tests."""
import pytest
from flask import current_app
from .. import create_app
from ..database import db
@pytest.fixture(autouse=True)
def initialise_testing_db():
"""Create database before testing, delete after."""
app = create_app('testing')
app_context = app.app_context()
app_context.push()
db.create_all()
yield
db.session.remove()
db.drop_all()
app_context.pop()
def test_app_exists():
"""Test app exists."""
assert current_app is not None
def test_app_is_testing():
"""Test app is testing."""
assert current_app.config['TESTING']
|
// ... existing code ...
"""Basic Unit Tests."""
import pytest
from flask import current_app
from .. import create_app
from ..database import db
@pytest.fixture(autouse=True)
def initialise_testing_db():
"""Create database before testing, delete after."""
app = create_app('testing')
app_context = app.app_context()
app_context.push()
db.create_all()
yield
db.session.remove()
db.drop_all()
app_context.pop()
def test_app_exists():
"""Test app exists."""
assert current_app is not None
def test_app_is_testing():
"""Test app is testing."""
assert current_app.config['TESTING']
// ... rest of the code ...
|
ccb90932cf967190029b3ce9494a1fd9e6cb889a
|
gaphor/UML/classes/tests/test_propertypages.py
|
gaphor/UML/classes/tests/test_propertypages.py
|
from gi.repository import Gtk
from gaphor import UML
from gaphor.UML.classes import ClassItem
from gaphor.UML.classes.classespropertypages import ClassAttributes
class TestClassPropertyPages:
def test_attribute_editing(self, case):
class_item = case.create(ClassItem, UML.Class)
model = ClassAttributes(class_item, (str, bool, object))
model.append([None, False, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="attr")
assert model[iter][-1] is class_item.subject.ownedAttribute[0]
|
from gi.repository import Gtk
from gaphor import UML
from gaphor.UML.classes import ClassItem, EnumerationItem
from gaphor.UML.classes.classespropertypages import (
ClassAttributes,
ClassEnumerationLiterals,
)
def test_attribute_editing(case):
class_item = case.create(ClassItem, UML.Class)
model = ClassAttributes(class_item, (str, bool, object))
model.append([None, False, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="attr")
assert model[iter][-1] is class_item.subject.ownedAttribute[0]
def test_enumeration_editing(case):
enum_item = case.create(EnumerationItem, UML.Enumeration)
model = ClassEnumerationLiterals(enum_item, (str, object))
model.append([None, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="enum")
assert model[iter][-1] is enum_item.subject.ownedLiteral[0]
|
Add test for enumeration editing
|
Add test for enumeration editing
Signed-off-by: Dan Yeaw <[email protected]>
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
python
|
## Code Before:
from gi.repository import Gtk
from gaphor import UML
from gaphor.UML.classes import ClassItem
from gaphor.UML.classes.classespropertypages import ClassAttributes
class TestClassPropertyPages:
def test_attribute_editing(self, case):
class_item = case.create(ClassItem, UML.Class)
model = ClassAttributes(class_item, (str, bool, object))
model.append([None, False, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="attr")
assert model[iter][-1] is class_item.subject.ownedAttribute[0]
## Instruction:
Add test for enumeration editing
Signed-off-by: Dan Yeaw <[email protected]>
## Code After:
from gi.repository import Gtk
from gaphor import UML
from gaphor.UML.classes import ClassItem, EnumerationItem
from gaphor.UML.classes.classespropertypages import (
ClassAttributes,
ClassEnumerationLiterals,
)
def test_attribute_editing(case):
class_item = case.create(ClassItem, UML.Class)
model = ClassAttributes(class_item, (str, bool, object))
model.append([None, False, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="attr")
assert model[iter][-1] is class_item.subject.ownedAttribute[0]
def test_enumeration_editing(case):
enum_item = case.create(EnumerationItem, UML.Enumeration)
model = ClassEnumerationLiterals(enum_item, (str, object))
model.append([None, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="enum")
assert model[iter][-1] is enum_item.subject.ownedLiteral[0]
|
# ... existing code ...
from gi.repository import Gtk
from gaphor import UML
from gaphor.UML.classes import ClassItem, EnumerationItem
from gaphor.UML.classes.classespropertypages import (
ClassAttributes,
ClassEnumerationLiterals,
)
def test_attribute_editing(case):
class_item = case.create(ClassItem, UML.Class)
model = ClassAttributes(class_item, (str, bool, object))
model.append([None, False, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="attr")
assert model[iter][-1] is class_item.subject.ownedAttribute[0]
def test_enumeration_editing(case):
enum_item = case.create(EnumerationItem, UML.Enumeration)
model = ClassEnumerationLiterals(enum_item, (str, object))
model.append([None, None])
path = Gtk.TreePath.new_first()
iter = model.get_iter(path)
model.update(iter, col=0, value="enum")
assert model[iter][-1] is enum_item.subject.ownedLiteral[0]
# ... rest of the code ...
|
e985163d189883a2419e34021971709c9c7498c0
|
request/__init__.py
|
request/__init__.py
|
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
|
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
|
Add jezdez to the authors
|
Add jezdez to the authors
|
Python
|
bsd-2-clause
|
gnublade/django-request,kylef/django-request,kylef/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request
|
python
|
## Code Before:
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
## Instruction:
Add jezdez to the authors
## Code After:
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
|
// ... existing code ...
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
// ... rest of the code ...
|
0529c392c8c3e75a03aa312e4fc7b367008fdf27
|
tests/test_20_main.py
|
tests/test_20_main.py
|
import click.testing
import pytest
from cfgrib import __main__
def test_main():
runner = click.testing.CliRunner()
res = runner.invoke(__main__.cfgrib_cli, ['selfcheck'])
assert res.exit_code == 0
assert 'Your system is ready.' in res.output
res = runner.invoke(__main__.cfgrib_cli, ['non-existent-command'])
assert res.exit_code == 2
|
import click.testing
from cfgrib import __main__
def test_main():
runner = click.testing.CliRunner()
res = runner.invoke(__main__.cfgrib_cli, ['selfcheck'])
assert res.exit_code == 0
assert 'Your system is ready.' in res.output
res = runner.invoke(__main__.cfgrib_cli, ['non-existent-command'])
assert res.exit_code == 2
|
Fix docs and CLI tests.
|
Fix docs and CLI tests.
|
Python
|
apache-2.0
|
ecmwf/cfgrib
|
python
|
## Code Before:
import click.testing
import pytest
from cfgrib import __main__
def test_main():
runner = click.testing.CliRunner()
res = runner.invoke(__main__.cfgrib_cli, ['selfcheck'])
assert res.exit_code == 0
assert 'Your system is ready.' in res.output
res = runner.invoke(__main__.cfgrib_cli, ['non-existent-command'])
assert res.exit_code == 2
## Instruction:
Fix docs and CLI tests.
## Code After:
import click.testing
from cfgrib import __main__
def test_main():
runner = click.testing.CliRunner()
res = runner.invoke(__main__.cfgrib_cli, ['selfcheck'])
assert res.exit_code == 0
assert 'Your system is ready.' in res.output
res = runner.invoke(__main__.cfgrib_cli, ['non-existent-command'])
assert res.exit_code == 2
|
# ... existing code ...
import click.testing
from cfgrib import __main__
# ... rest of the code ...
|
8f0caecc4accf8258e2cae664181680973e1add6
|
hftools/dataset/tests/test_helper.py
|
hftools/dataset/tests/test_helper.py
|
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
Fix to remove DeprecationWarning message from test log
|
Fix to remove DeprecationWarning message from test log
|
Python
|
bsd-3-clause
|
hftools/hftools
|
python
|
## Code Before:
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
## Instruction:
Fix to remove DeprecationWarning message from test log
## Code After:
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
// ... existing code ...
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
// ... rest of the code ...
|
31efd439c29a3a64b53c53475f630d31f51d3eb5
|
src/main/java/de/springbootbuch/webmvc/DemoFilter.java
|
src/main/java/de/springbootbuch/webmvc/DemoFilter.java
|
package de.springbootbuch.webmvc;
import java.io.IOException;
import javax.inject.Provider;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Part of springbootbuch.de.
*
* @author Michael J. Simons
* @author @rotnroll666
*/
@Component
public class DemoFilter implements Filter {
private static final Logger LOG = LoggerFactory
.getLogger(DemoFilter.class);
private final Provider<ShoppingCart> shoppingCart;
public DemoFilter(
Provider<ShoppingCart> shoppingCart) {
this.shoppingCart = shoppingCart;
}
	// Init and destroy not shown
	// here for brevity
@Override
public void init(
FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(
ServletRequest request,
ServletResponse response,
FilterChain chain
) throws IOException, ServletException {
chain.doFilter(request, response);
LOG.info(
"Shopping cart is {} empty",
shoppingCart.get()
.getContent().isEmpty() ? "" : "not"
);
}
@Override
public void destroy() {
}
}
|
package de.springbootbuch.webmvc;
import java.io.IOException;
import javax.inject.Provider;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Part of springbootbuch.de.
*
* @author Michael J. Simons
* @author @rotnroll666
*/
@Component
public class DemoFilter implements Filter {
private static final Logger LOG = LoggerFactory
.getLogger(DemoFilter.class);
private final Provider<ShoppingCart> shoppingCart;
public DemoFilter(
Provider<ShoppingCart> shoppingCart) {
this.shoppingCart = shoppingCart;
}
	// Init and destroy not shown
	// here for brevity
@Override
public void init(
FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(
ServletRequest request,
ServletResponse response,
FilterChain chain
) throws IOException, ServletException {
chain.doFilter(request, response);
if(request instanceof HttpServletRequest && ((HttpServletRequest)request).getSession(false) != null) {
LOG.info(
"Request from {}",
shoppingCart.get()
.getContent().isEmpty() ? "" : "not"
);
}
}
@Override
public void destroy() {
}
}
|
Fix bug when no session scope
|
Fix bug when no session scope
|
Java
|
apache-2.0
|
springbootbuch/webmvc,springbootbuch/webmvc
|
java
|
## Code Before:
package de.springbootbuch.webmvc;
import java.io.IOException;
import javax.inject.Provider;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Part of springbootbuch.de.
*
* @author Michael J. Simons
* @author @rotnroll666
*/
@Component
public class DemoFilter implements Filter {
private static final Logger LOG = LoggerFactory
.getLogger(DemoFilter.class);
private final Provider<ShoppingCart> shoppingCart;
public DemoFilter(
Provider<ShoppingCart> shoppingCart) {
this.shoppingCart = shoppingCart;
}
	// Init and destroy not shown
	// here for brevity
@Override
public void init(
FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(
ServletRequest request,
ServletResponse response,
FilterChain chain
) throws IOException, ServletException {
chain.doFilter(request, response);
LOG.info(
"Shopping cart is {} empty",
shoppingCart.get()
.getContent().isEmpty() ? "" : "not"
);
}
@Override
public void destroy() {
}
}
## Instruction:
Fix bug when no session scope
## Code After:
package de.springbootbuch.webmvc;
import java.io.IOException;
import javax.inject.Provider;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
* Part of springbootbuch.de.
*
* @author Michael J. Simons
* @author @rotnroll666
*/
@Component
public class DemoFilter implements Filter {
private static final Logger LOG = LoggerFactory
.getLogger(DemoFilter.class);
private final Provider<ShoppingCart> shoppingCart;
public DemoFilter(
Provider<ShoppingCart> shoppingCart) {
this.shoppingCart = shoppingCart;
}
	// Init and destroy not shown
	// here for brevity
@Override
public void init(
FilterConfig filterConfig) throws ServletException {
}
@Override
public void doFilter(
ServletRequest request,
ServletResponse response,
FilterChain chain
) throws IOException, ServletException {
chain.doFilter(request, response);
if(request instanceof HttpServletRequest && ((HttpServletRequest)request).getSession(false) != null) {
LOG.info(
"Request from {}",
shoppingCart.get()
.getContent().isEmpty() ? "" : "not"
);
}
}
@Override
public void destroy() {
}
}
|
...
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
...
FilterChain chain
) throws IOException, ServletException {
chain.doFilter(request, response);
if(request instanceof HttpServletRequest && ((HttpServletRequest)request).getSession(false) != null) {
LOG.info(
"Request from {}",
shoppingCart.get()
.getContent().isEmpty() ? "" : "not"
);
}
}
@Override
...
|
a40a925c29b04b1b6822566e72db4afa5552479c
|
pygame/_error.py
|
pygame/_error.py
|
from pygame._sdl import sdl, ffi
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], int) or
not isinstance(rect[1], int)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
return rect
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
from pygame._sdl import sdl, ffi
from numbers import Number
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
# This is as liberal as pygame when used for pygame.surface, but
# more liberal for pygame.display. I don't think the inconsistency
# matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], Number) or
not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
# We'll throw a conversion TypeError here if someone is using a
# complex number, but so does pygame.
return int(rect[0]), int(rect[1])
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
Support arbitrary numeric types for creating pygame surfaces
|
Support arbitrary numeric types for creating pygame surfaces
|
Python
|
lgpl-2.1
|
CTPUG/pygame_cffi,CTPUG/pygame_cffi,CTPUG/pygame_cffi
|
python
|
## Code Before:
from pygame._sdl import sdl, ffi
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], int) or
not isinstance(rect[1], int)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
return rect
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
## Instruction:
Support arbitrary numeric types for creating pygame surfaces
## Code After:
from pygame._sdl import sdl, ffi
from numbers import Number
class SDLError(Exception):
"""SDL error."""
@classmethod
def from_sdl_error(cls):
return cls(ffi.string(sdl.SDL_GetError()))
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
# This is as liberal as pygame when used for pygame.surface, but
# more liberal for pygame.display. I don't think the inconsistency
# matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], Number) or
not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
# We'll throw a conversion TypeError here if someone is using a
# complex number, but so does pygame.
return int(rect[0]), int(rect[1])
def get_error():
err = ffi.string(sdl.SDL_GetError())
if not isinstance(err, str):
return err.decode('utf8')
return err
def set_error(errmsg):
if not isinstance(errmsg, bytes):
errmsg = errmsg.encode('utf8')
sdl.SDL_SetError(errmsg)
|
# ... existing code ...
from pygame._sdl import sdl, ffi
from numbers import Number
class SDLError(Exception):
# ... modified code ...
def unpack_rect(rect):
"""Unpack the size and raise a type error if needed."""
# This is as liberal as pygame when used for pygame.surface, but
# more liberal for pygame.display. I don't think the inconsistency
# matters
if (not hasattr(rect, '__iter__') or
len(rect) != 2 or
not isinstance(rect[0], Number) or
not isinstance(rect[1], Number)):
raise TypeError("expected tuple of two integers but got %r"
% type(rect))
# We'll throw a conversion TypeError here if someone is using a
# complex number, but so does pygame.
return int(rect[0]), int(rect[1])
def get_error():
# ... rest of the code ...
|
593a5c1970adb0cdb71efbf5774775d2ca36b36a
|
src/main/java/com/teamacronymcoders/contenttweaker/modules/materials/parttypes/CTCreatedPartType.java
|
src/main/java/com/teamacronymcoders/contenttweaker/modules/materials/parttypes/CTCreatedPartType.java
|
package com.teamacronymcoders.contenttweaker.modules.materials.parttypes;
import com.teamacronymcoders.base.materialsystem.materialparts.MaterialPart;
import com.teamacronymcoders.base.materialsystem.parttype.PartType;
import com.teamacronymcoders.contenttweaker.modules.materials.functions.IRegisterMaterialPart;
import com.teamacronymcoders.contenttweaker.modules.materials.materialpartdata.IPartDataPiece;
import com.teamacronymcoders.contenttweaker.modules.materials.materialparts.CTMaterialPart;
import javax.annotation.Nonnull;
public class CTCreatedPartType extends PartType implements IPartType {
private final IRegisterMaterialPart materialPartRegister;
public CTCreatedPartType(String name, IRegisterMaterialPart materialPartRegister) {
super(name);
this.materialPartRegister = materialPartRegister;
}
@Override
public void setup(@Nonnull MaterialPart materialPart) {
this.materialPartRegister.register(new CTMaterialPart(materialPart));
}
@Override
public void setData(IPartDataPiece[] data) {
}
@Override
public Object getInternal() {
return this;
}
}
|
package com.teamacronymcoders.contenttweaker.modules.materials.parttypes;
import com.teamacronymcoders.base.materialsystem.MaterialUser;
import com.teamacronymcoders.base.materialsystem.materialparts.MaterialPart;
import com.teamacronymcoders.base.materialsystem.parttype.PartType;
import com.teamacronymcoders.contenttweaker.modules.materials.functions.IRegisterMaterialPart;
import com.teamacronymcoders.contenttweaker.modules.materials.materialpartdata.IPartDataPiece;
import com.teamacronymcoders.contenttweaker.modules.materials.materialparts.CTMaterialPart;
import javax.annotation.Nonnull;
public class CTCreatedPartType extends PartType implements IPartType {
private final IRegisterMaterialPart materialPartRegister;
public CTCreatedPartType(String name, IRegisterMaterialPart materialPartRegister) {
super(name);
this.materialPartRegister = materialPartRegister;
}
@Override
public void setup(@Nonnull MaterialPart materialPart, @Nonnull MaterialUser materialUser) {
this.materialPartRegister.register(new CTMaterialPart(materialPart));
}
@Override
public void setData(IPartDataPiece[] data) {
}
@Override
public Object getInternal() {
return this;
}
}
|
Fix Part type to match updates to Material System
|
Fix Part type to match updates to Material System
|
Java
|
mit
|
The-Acronym-Coders/Tailored-Objects,The-Acronym-Coders/ContentTweaker
|
java
|
## Code Before:
package com.teamacronymcoders.contenttweaker.modules.materials.parttypes;
import com.teamacronymcoders.base.materialsystem.materialparts.MaterialPart;
import com.teamacronymcoders.base.materialsystem.parttype.PartType;
import com.teamacronymcoders.contenttweaker.modules.materials.functions.IRegisterMaterialPart;
import com.teamacronymcoders.contenttweaker.modules.materials.materialpartdata.IPartDataPiece;
import com.teamacronymcoders.contenttweaker.modules.materials.materialparts.CTMaterialPart;
import javax.annotation.Nonnull;
public class CTCreatedPartType extends PartType implements IPartType {
private final IRegisterMaterialPart materialPartRegister;
public CTCreatedPartType(String name, IRegisterMaterialPart materialPartRegister) {
super(name);
this.materialPartRegister = materialPartRegister;
}
@Override
public void setup(@Nonnull MaterialPart materialPart) {
this.materialPartRegister.register(new CTMaterialPart(materialPart));
}
@Override
public void setData(IPartDataPiece[] data) {
}
@Override
public Object getInternal() {
return this;
}
}
## Instruction:
Fix Part type to match updates to Material System
## Code After:
package com.teamacronymcoders.contenttweaker.modules.materials.parttypes;
import com.teamacronymcoders.base.materialsystem.MaterialUser;
import com.teamacronymcoders.base.materialsystem.materialparts.MaterialPart;
import com.teamacronymcoders.base.materialsystem.parttype.PartType;
import com.teamacronymcoders.contenttweaker.modules.materials.functions.IRegisterMaterialPart;
import com.teamacronymcoders.contenttweaker.modules.materials.materialpartdata.IPartDataPiece;
import com.teamacronymcoders.contenttweaker.modules.materials.materialparts.CTMaterialPart;
import javax.annotation.Nonnull;
public class CTCreatedPartType extends PartType implements IPartType {
private final IRegisterMaterialPart materialPartRegister;
public CTCreatedPartType(String name, IRegisterMaterialPart materialPartRegister) {
super(name);
this.materialPartRegister = materialPartRegister;
}
@Override
public void setup(@Nonnull MaterialPart materialPart, @Nonnull MaterialUser materialUser) {
this.materialPartRegister.register(new CTMaterialPart(materialPart));
}
@Override
public void setData(IPartDataPiece[] data) {
}
@Override
public Object getInternal() {
return this;
}
}
|
# ... existing code ...
package com.teamacronymcoders.contenttweaker.modules.materials.parttypes;
import com.teamacronymcoders.base.materialsystem.MaterialUser;
import com.teamacronymcoders.base.materialsystem.materialparts.MaterialPart;
import com.teamacronymcoders.base.materialsystem.parttype.PartType;
import com.teamacronymcoders.contenttweaker.modules.materials.functions.IRegisterMaterialPart;
# ... modified code ...
}
@Override
public void setup(@Nonnull MaterialPart materialPart, @Nonnull MaterialUser materialUser) {
this.materialPartRegister.register(new CTMaterialPart(materialPart));
}
# ... rest of the code ...
|
30431b5633d4d43bd289f48fda41537544ca61f4
|
src/net/zephyrizing/http_server/HttpServer.java
|
src/net/zephyrizing/http_server/HttpServer.java
|
package net.zephyrizing.http_server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
public class HttpServer {
public static void main(String[] args) throws Exception {
int portNumber = Integer.parseInt(args[0]);
System.err.println("Starting server on port " + args[0]);
try (ServerSocket listenSocket = new ServerSocket(portNumber)) {
System.err.println("Listening for clients...");
try (Socket socket = listenSocket.accept();
PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
BufferedReader in = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
BufferedReader stdIn = new BufferedReader(
new InputStreamReader(System.in))) {
System.err.println("Connected to client. Proceeding to echo...");
String clientInput;
while ((clientInput = in.readLine()) != null) {
System.out.println("Got: "+clientInput);
out.println("Echo: "+clientInput);
}
}
}
}
}
|
package net.zephyrizing.http_server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
public class HttpServer {
public static void main(String[] args) throws Exception {
int portNumber;
if (args.length == 1) {
portNumber = Integer.parseInt(args[0]);
} else {
portNumber = 80;
}
System.err.println("Starting server on port " + portNumber);
try (ServerSocket listenSocket = new ServerSocket(portNumber)) {
System.err.println("Listening for clients...");
try (Socket socket = listenSocket.accept();
PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
BufferedReader in = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
BufferedReader stdIn = new BufferedReader(
new InputStreamReader(System.in))) {
System.err.println("Connected to client.");
String request = in.readLine();
String[] params = request.split(" ");
assert(params.length == 3);
String method = params[0];
String path = params[1];
String protocolVersion = params[2];
System.out.format("Client requested to %s file %s over %s.\n",
method, path, protocolVersion);
out.format("%s 200 OK\r\n", protocolVersion);
}
}
}
}
|
Implement a very basic http server
|
Implement a very basic http server
|
Java
|
mit
|
RadicalZephyr/http-server
|
java
|
## Code Before:
package net.zephyrizing.http_server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
public class HttpServer {
public static void main(String[] args) throws Exception {
int portNumber = Integer.parseInt(args[0]);
System.err.println("Starting server on port " + args[0]);
try (ServerSocket listenSocket = new ServerSocket(portNumber)) {
System.err.println("Listening for clients...");
try (Socket socket = listenSocket.accept();
PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
BufferedReader in = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
BufferedReader stdIn = new BufferedReader(
new InputStreamReader(System.in))) {
System.err.println("Connected to client. Proceeding to echo...");
String clientInput;
while ((clientInput = in.readLine()) != null) {
System.out.println("Got: "+clientInput);
out.println("Echo: "+clientInput);
}
}
}
}
}
## Instruction:
Implement a very basic http server
## Code After:
package net.zephyrizing.http_server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
public class HttpServer {
public static void main(String[] args) throws Exception {
int portNumber;
if (args.length == 1) {
portNumber = Integer.parseInt(args[0]);
} else {
portNumber = 80;
}
System.err.println("Starting server on port " + portNumber);
try (ServerSocket listenSocket = new ServerSocket(portNumber)) {
System.err.println("Listening for clients...");
try (Socket socket = listenSocket.accept();
PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
BufferedReader in = new BufferedReader(
new InputStreamReader(socket.getInputStream()));
BufferedReader stdIn = new BufferedReader(
new InputStreamReader(System.in))) {
System.err.println("Connected to client.");
String request = in.readLine();
String[] params = request.split(" ");
assert(params.length == 3);
String method = params[0];
String path = params[1];
String protocolVersion = params[2];
System.out.format("Client requested to %s file %s over %s.\n",
method, path, protocolVersion);
out.format("%s 200 OK\r\n", protocolVersion);
}
}
}
}
|
# ... existing code ...
public class HttpServer {
public static void main(String[] args) throws Exception {
int portNumber;
if (args.length == 1) {
portNumber = Integer.parseInt(args[0]);
} else {
portNumber = 80;
}
System.err.println("Starting server on port " + portNumber);
try (ServerSocket listenSocket = new ServerSocket(portNumber)) {
System.err.println("Listening for clients...");
# ... modified code ...
new InputStreamReader(socket.getInputStream()));
BufferedReader stdIn = new BufferedReader(
new InputStreamReader(System.in))) {
System.err.println("Connected to client.");
String request = in.readLine();
String[] params = request.split(" ");
assert(params.length == 3);
String method = params[0];
String path = params[1];
String protocolVersion = params[2];
System.out.format("Client requested to %s file %s over %s.\n",
method, path, protocolVersion);
out.format("%s 200 OK\r\n", protocolVersion);
}
}
}
# ... rest of the code ...
|
87936000f43b9dfc88dcd2c0d59ce018fb1e8990
|
src/net.michaelsavich.notification/net/michaelsavich/notification/SynchronousNotificationCenter.java
|
src/net.michaelsavich.notification/net/michaelsavich/notification/SynchronousNotificationCenter.java
|
package net.michaelsavich.notification;
import java.util.HashSet;
/**
* An instance of NotificationCenter that dispatches {@link Notification notifications} to their {@link NotificationObserver observers} serially.
* Note that an instance of this object is returned by the static method {@link NotificationCenter#primary()}. This class is exposed as public primarily because it
* may be useful to subclass SynchronousNotificationCenter to implement custom bookkeeping. Furthermore, if for some reason you don't want to use
* the singleton-like behavior of the static methods in NotificationCenter, you can instantiate this class directly and manage it yourself.
*/
public class SynchronousNotificationCenter extends NotificationCenter {
/**
* {@inheritDoc}
* <p>
* The implementation of this method provided by SynchronousNotificationCenter is
* synchronous, and will halt everything until all objects have finished responding.
* As such, avoid adding {@link NotificationObserver observers} with long-running callbacks to a SynchronousNotificationCenter.
* </p>
* <p>
	 * When calling this method, remember that no guarantee is made regarding the order in which observers are notified.
* </p>
*/
@Override
public void post(Notification notification) {
dispatchTable.getOrDefault(notification.getName(), new HashSet<>())
.forEach(o -> o.receiveNotification(notification));
}
}
|
package net.michaelsavich.notification;
import java.util.HashSet;
/**
* An instance of NotificationCenter that dispatches {@link Notification notifications} to their {@link NotificationObserver observers} serially.
* Note that an instance of this object is returned by the static method {@link NotificationCenter#primary()}. This class is exposed as public primarily because it
* may be useful to subclass SynchronousNotificationCenter to implement custom bookkeeping. Furthermore, if for some reason you don't want to use
* the singleton-like behavior of the static methods in NotificationCenter, you can instantiate this class directly and manage it yourself.
*/
public class SynchronousNotificationCenter extends NotificationCenter {
/**
* {@inheritDoc}
* <p>
* The implementation of this method provided by SynchronousNotificationCenter is
* synchronous, and will halt everything until all objects have finished responding.
* As such, avoid adding {@link NotificationObserver observers} with long-running callbacks to a SynchronousNotificationCenter.
* </p>
* <p>
	 * When calling this method, remember that no guarantee is made regarding the order in which observers are notified.
* </p>
*/
@Override
public void post(Notification notification) {
getObservers(notification.getName())
.forEach(o -> o.receiveNotification(notification));
}
}
|
Switch to using protected getter
|
Switch to using protected getter
|
Java
|
unlicense
|
michaelsavich/NotificationCenter
|
java
|
## Code Before:
package net.michaelsavich.notification;
import java.util.HashSet;
/**
* An instance of NotificationCenter that dispatches {@link Notification notifications} to their {@link NotificationObserver observers} serially.
* Note that an instance of this object is returned by the static method {@link NotificationCenter#primary()}. This class is exposed as public primarily because it
* may be useful to subclass SynchronousNotificationCenter to implement custom bookkeeping. Furthermore, if for some reason you don't want to use
* the singleton-like behavior of the static methods in NotificationCenter, you can instantiate this class directly and manage it yourself.
*/
public class SynchronousNotificationCenter extends NotificationCenter {
/**
* {@inheritDoc}
* <p>
* The implementation of this method provided by SynchronousNotificationCenter is
* synchronous, and will halt everything until all objects have finished responding.
* As such, avoid adding {@link NotificationObserver observers} with long-running callbacks to a SynchronousNotificationCenter.
* </p>
* <p>
	 * When calling this method, remember that no guarantee is made regarding the order in which observers are notified.
* </p>
*/
@Override
public void post(Notification notification) {
dispatchTable.getOrDefault(notification.getName(), new HashSet<>())
.forEach(o -> o.receiveNotification(notification));
}
}
## Instruction:
Switch to using protected getter
## Code After:
package net.michaelsavich.notification;
import java.util.HashSet;
/**
* An instance of NotificationCenter that dispatches {@link Notification notifications} to their {@link NotificationObserver observers} serially.
* Note that an instance of this object is returned by the static method {@link NotificationCenter#primary()}. This class is exposed as public primarily because it
* may be useful to subclass SynchronousNotificationCenter to implement custom bookkeeping. Furthermore, if for some reason you don't want to use
* the singleton-like behavior of the static methods in NotificationCenter, you can instantiate this class directly and manage it yourself.
*/
public class SynchronousNotificationCenter extends NotificationCenter {
/**
* {@inheritDoc}
* <p>
* The implementation of this method provided by SynchronousNotificationCenter is
* synchronous, and will halt everything until all objects have finished responding.
* As such, avoid adding {@link NotificationObserver observers} with long-running callbacks to a SynchronousNotificationCenter.
* </p>
* <p>
	 * When calling this method, remember that no guarantee is made regarding the order in which observers are notified.
* </p>
*/
@Override
public void post(Notification notification) {
getObservers(notification.getName())
.forEach(o -> o.receiveNotification(notification));
}
}
|
# ... existing code ...
*/
@Override
public void post(Notification notification) {
getObservers(notification.getName())
.forEach(o -> o.receiveNotification(notification));
}
# ... rest of the code ...
|
99909048bc702e21e980bb1167caf9217aa31196
|
steel/fields/strings.py
|
steel/fields/strings.py
|
import codecs
from steel.fields import Field
from steel.fields.mixin import Fixed
__all__ = ['Bytes', 'String', 'FixedBytes', 'FixedString']
class Bytes(Field):
"A stream of bytes that should be left unconverted"
def encode(self, value):
# Nothing to do here
return value
def decode(self, value):
# Nothing to do here
return value
class String(Field):
"A string that gets converted using a specified encoding"
def __init__(self, *args, encoding, **kwargs):
# Bail out early if the encoding isn't valid
codecs.lookup(encoding)
self.encoding = encoding
super(String, self).__init__(*args, **kwargs)
def encode(self, value):
return value.encode(self.encoding)
def decode(self, value):
return value.decode(self.encoding)
class FixedBytes(Fixed, Bytes):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
class FixedString(Fixed, String):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
|
import codecs
from steel.fields import Field
from steel.fields.mixin import Fixed
__all__ = ['Bytes', 'String', 'FixedBytes', 'FixedString']
class Bytes(Field):
"A stream of bytes that should be left unconverted"
def encode(self, value):
# Nothing to do here
return value
def decode(self, value):
# Nothing to do here
return value
class String(Field):
"A string that gets converted using a specified encoding"
def __init__(self, *args, encoding, **kwargs):
# Bail out early if the encoding isn't valid
codecs.lookup(encoding)
self.encoding = encoding
super(String, self).__init__(*args, **kwargs)
def encode(self, value):
return value.encode(self.encoding)
def decode(self, value):
return value.decode(self.encoding)
class FixedBytes(Fixed, Bytes):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
class FixedString(Fixed, String):
"A string that will always be set to the same value"
# The mixin does the heavy lifting
pass
|
Fix the docstring for FixedString
|
Fix the docstring for FixedString
|
Python
|
bsd-3-clause
|
gulopine/steel-experiment
|
python
|
## Code Before:
import codecs
from steel.fields import Field
from steel.fields.mixin import Fixed
__all__ = ['Bytes', 'String', 'FixedBytes', 'FixedString']
class Bytes(Field):
"A stream of bytes that should be left unconverted"
def encode(self, value):
# Nothing to do here
return value
def decode(self, value):
# Nothing to do here
return value
class String(Field):
"A string that gets converted using a specified encoding"
def __init__(self, *args, encoding, **kwargs):
# Bail out early if the encoding isn't valid
codecs.lookup(encoding)
self.encoding = encoding
super(String, self).__init__(*args, **kwargs)
def encode(self, value):
return value.encode(self.encoding)
def decode(self, value):
return value.decode(self.encoding)
class FixedBytes(Fixed, Bytes):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
class FixedString(Fixed, String):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
## Instruction:
Fix the docstring for FixedString
## Code After:
import codecs
from steel.fields import Field
from steel.fields.mixin import Fixed
__all__ = ['Bytes', 'String', 'FixedBytes', 'FixedString']
class Bytes(Field):
"A stream of bytes that should be left unconverted"
def encode(self, value):
# Nothing to do here
return value
def decode(self, value):
# Nothing to do here
return value
class String(Field):
"A string that gets converted using a specified encoding"
def __init__(self, *args, encoding, **kwargs):
# Bail out early if the encoding isn't valid
codecs.lookup(encoding)
self.encoding = encoding
super(String, self).__init__(*args, **kwargs)
def encode(self, value):
return value.encode(self.encoding)
def decode(self, value):
return value.decode(self.encoding)
class FixedBytes(Fixed, Bytes):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
class FixedString(Fixed, String):
"A string that will always be set to the same value"
# The mixin does the heavy lifting
pass
|
// ... existing code ...
class FixedString(Fixed, String):
"A string that will always be set to the same value"
# The mixin does the heavy lifting
pass
// ... rest of the code ...
|
c65e2b3bb2d43a5d12d50cf79636e94a6a1f1dc5
|
Algo.py
|
Algo.py
|
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
assert population_size % 2 == 0
self.half_population_size = population_size // 2
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
Check that population_size is even, and set half_population_size.
|
Check that population_size is even, and set half_population_size.
This will be used by genetic algorithms.
|
Python
|
isc
|
dargor/python-guess-random-color,dargor/python-guess-random-color
|
python
|
## Code Before:
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
## Instruction:
Check that population_size is even, and set half_population_size.
This will be used by genetic algorithms.
## Code After:
from abc import ABC, abstractmethod
from Color import RandomColor
class Algo(ABC):
def __init__(self, population_size):
assert population_size % 2 == 0
self.half_population_size = population_size // 2
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
self.population = []
for i in range(self.population_size):
self.population.append(RandomColor())
def dump(self):
for color in self.population:
print('{} '.format(color), end='')
print(flush=True)
@abstractmethod
def tick(self, deltas):
pass
|
// ... existing code ...
class Algo(ABC):
def __init__(self, population_size):
assert population_size % 2 == 0
self.half_population_size = population_size // 2
self.population_size = population_size
self.set_random_population()
def set_random_population(self):
// ... rest of the code ...
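For context, a short sketch of how half_population_size could be used by a genetic-algorithm subclass of Algo. The module imports and the selection/refill strategy are assumptions for illustration, not part of the original commit.

from Algo import Algo            # assumed module name, matching the file Algo.py
from Color import RandomColor

class GeneticAlgo(Algo):
    def tick(self, deltas):
        # Assumed: a smaller delta means a better guess; keep the better half,
        # refill the other half with fresh random colors.
        ranked = [color for _, color in sorted(zip(deltas, self.population),
                                               key=lambda pair: pair[0])]
        survivors = ranked[:self.half_population_size]
        self.population = survivors + [RandomColor()
                                       for _ in range(self.half_population_size)]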
|
8ca76d1ce7c0aab8faafd486c642effcd9502518
|
src/chess/MoveData.java
|
src/chess/MoveData.java
|
package chess;
public class MoveData {
private int toRow;
private int toCol;
private boolean isACapture;
public MoveData(int row, int col, boolean capture) {
toRow = row;
toCol = col;
isACapture = capture;
}
public int getRow() {
return toRow;
}
public int getCol() {
return toCol;
}
public boolean checkCapture() {
return isACapture;
}
}
|
package chess;
import chess.pieces.Piece;
public class MoveData {
private Piece startPiece;
private int toRow;
private int toCol;
private int fromRow;
private int fromCol;
private boolean isACapture;
public MoveData(Piece startPiece, int fromRow, int fromCol, int toRow, int toCol, boolean capture) {
this.startPiece = startPiece;
this.fromRow = fromRow;
this.fromCol = fromCol;
this.toRow = toRow;
this.toCol = toCol;
isACapture = capture;
}
public Piece getPiece() {
return startPiece;
}
public int getStartRow() {
return fromRow;
}
public int getStartCol() {
return fromCol;
}
public int getEndRow() {
return toRow;
}
public int getEndCol() {
return toCol;
}
public boolean checkCapture() {
return isACapture;
}
}
|
Add from squares and calling piece data.
|
Add from squares and calling piece data.
|
Java
|
unlicense
|
b-lu8961/ChessPlus
|
java
|
## Code Before:
package chess;
public class MoveData {
private int toRow;
private int toCol;
private boolean isACapture;
public MoveData(int row, int col, boolean capture) {
toRow = row;
toCol = col;
isACapture = capture;
}
public int getRow() {
return toRow;
}
public int getCol() {
return toCol;
}
public boolean checkCapture() {
return isACapture;
}
}
## Instruction:
Add from squares and calling piece data.
## Code After:
package chess;
import chess.pieces.Piece;
public class MoveData {
private Piece startPiece;
private int toRow;
private int toCol;
private int fromRow;
private int fromCol;
private boolean isACapture;
public MoveData(Piece startPiece, int fromRow, int fromCol, int toRow, int toCol, boolean capture) {
this.startPiece = startPiece;
this.fromRow = fromRow;
this.fromCol = fromCol;
this.toRow = toRow;
this.toCol = toCol;
isACapture = capture;
}
public Piece getPiece() {
return startPiece;
}
public int getStartRow() {
return fromRow;
}
public int getStartCol() {
return fromCol;
}
public int getEndRow() {
return toRow;
}
public int getEndCol() {
return toCol;
}
public boolean checkCapture() {
return isACapture;
}
}
|
...
package chess;
import chess.pieces.Piece;
public class MoveData {
private Piece startPiece;
private int toRow;
private int toCol;
private int fromRow;
private int fromCol;
private boolean isACapture;
public MoveData(Piece startPiece, int fromRow, int fromCol, int toRow, int toCol, boolean capture) {
this.startPiece = startPiece;
this.fromRow = fromRow;
this.fromCol = fromCol;
this.toRow = toRow;
this.toCol = toCol;
isACapture = capture;
}
public Piece getPiece() {
return startPiece;
}
public int getStartRow() {
return fromRow;
}
public int getStartCol() {
return fromCol;
}
public int getEndRow() {
return toRow;
}
public int getEndCol() {
return toCol;
}
public boolean checkCapture() {
...
|
21d45e38d07a413aeeb19e10a68e540d1f6d5851
|
core/forms.py
|
core/forms.py
|
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
url = forms.CharField(label='', max_length=100, required=False)
sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
required=False, label='')
def __init__(self, *args, **kwargs):
super(FlatpageForm, self).__init__(*args, **kwargs)
self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
self.fields['url'].widget = HiddenInput()
self.fields['sites'].widget = MultipleHiddenInput()
def clean_url(self):
return True
def save(self, commit=True):
flatpage = super(PageForm, self).save(commit=False)
flatpage.save()
flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
return flatpage
class Meta:
widgets = {
'content': forms.widgets.Textarea(),
}
class Media:
js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
url = forms.CharField(label='', max_length=100, required=False)
sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
required=False, label='')
def __init__(self, *args, **kwargs):
super(FlatpageForm, self).__init__(*args, **kwargs)
self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
self.fields['url'].widget = HiddenInput()
self.fields['sites'].widget = MultipleHiddenInput()
def clean_url(self):
return True
def save(self, commit=True):
flatpage = super(PageForm, self).save(commit=False)
flatpage.save()
flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
return flatpage
class Meta:
widgets = {
'content': forms.widgets.Textarea(),
}
class Media:
js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
Remove last references to flatpage so it doesn't show up on the admin page
|
Remove last references to flatpage so it doesn't show up on the admin page
|
Python
|
agpl-3.0
|
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
|
python
|
## Code Before:
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
url = forms.CharField(label='', max_length=100, required=False)
sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
required=False, label='')
def __init__(self, *args, **kwargs):
super(FlatpageForm, self).__init__(*args, **kwargs)
self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
self.fields['url'].widget = HiddenInput()
self.fields['sites'].widget = MultipleHiddenInput()
def clean_url(self):
return True
def save(self, commit=True):
flatpage = super(PageForm, self).save(commit=False)
flatpage.save()
flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
return flatpage
class Meta:
widgets = {
'content': forms.widgets.Textarea(),
}
class Media:
js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
## Instruction:
Remove last references to flatpage so it doesn't show up on the admin page
## Code After:
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
url = forms.CharField(label='', max_length=100, required=False)
sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
required=False, label='')
def __init__(self, *args, **kwargs):
super(FlatpageForm, self).__init__(*args, **kwargs)
self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
self.fields['url'].widget = HiddenInput()
self.fields['sites'].widget = MultipleHiddenInput()
def clean_url(self):
return True
def save(self, commit=True):
flatpage = super(PageForm, self).save(commit=False)
flatpage.save()
flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
return flatpage
class Meta:
widgets = {
'content': forms.widgets.Textarea(),
}
class Media:
js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
// ... existing code ...
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
// ... rest of the code ...
|
7b6754e3a37fb82e4c101fafc0257c03ae437282
|
org.cohorte.studio.eclipse.ui.node/src/org/cohorte/studio/eclipse/ui/node/Activator.java
|
org.cohorte.studio.eclipse.ui.node/src/org/cohorte/studio/eclipse/ui/node/Activator.java
|
package org.cohorte.studio.eclipse.ui.node;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "org.cohorte.studio.eclipse.ui.application"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
/**
* The constructor
*/
public Activator() {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
}
|
package org.cohorte.studio.eclipse.ui.node;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "org.cohorte.studio.eclipse.ui.node"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
/**
* The constructor
*/
public Activator() {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
}
|
Fix plug-in ID to match bundle's symbolic name.
|
Fix plug-in ID to match bundle's symbolic name.
|
Java
|
epl-1.0
|
cohorte/cohorte-studio
|
java
|
## Code Before:
package org.cohorte.studio.eclipse.ui.node;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "org.cohorte.studio.eclipse.ui.application"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
/**
* The constructor
*/
public Activator() {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
}
## Instruction:
Fix plug-in ID to match bundle's symbolic name.
## Code After:
package org.cohorte.studio.eclipse.ui.node;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
/**
* The activator class controls the plug-in life cycle
*/
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "org.cohorte.studio.eclipse.ui.node"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
/**
* The constructor
*/
public Activator() {
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
*/
public void start(BundleContext context) throws Exception {
super.start(context);
plugin = this;
}
/*
* (non-Javadoc)
* @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
*/
public void stop(BundleContext context) throws Exception {
plugin = null;
super.stop(context);
}
/**
* Returns the shared instance
*
* @return the shared instance
*/
public static Activator getDefault() {
return plugin;
}
}
|
...
public class Activator extends AbstractUIPlugin {
// The plug-in ID
public static final String PLUGIN_ID = "org.cohorte.studio.eclipse.ui.node"; //$NON-NLS-1$
// The shared instance
private static Activator plugin;
...
|
b6d61fef0fe372c7149fa52e2ab1acff144d0118
|
tests/fixtures/dummy/facilities.py
|
tests/fixtures/dummy/facilities.py
|
from fixture import DataSet
from .address import AddressData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
from fixture import DataSet
from .address import AddressData
from .finance import AccountData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
fee_account = AccountData.dummy_revenue
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
fee_account = AccountData.dummy_revenue
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
Add fee_account to BuildingData of legacy test base
|
Add fee_account to BuildingData of legacy test base
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
python
|
## Code Before:
from fixture import DataSet
from .address import AddressData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
## Instruction:
Add fee_account to BuildingData of legacy test base
## Code After:
from fixture import DataSet
from .address import AddressData
from .finance import AccountData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
fee_account = AccountData.dummy_revenue
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
fee_account = AccountData.dummy_revenue
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
...
from fixture import DataSet
from .address import AddressData
from .finance import AccountData
class SiteData(DataSet):
...
street = "dummy"
number = "01"
short_name = "abc"
fee_account = AccountData.dummy_revenue
class dummy_house2:
site = SiteData.dummy
...
street = "dummy"
number = "02"
short_name = "def"
fee_account = AccountData.dummy_revenue
class RoomData(DataSet):
...
|
c334f19745b252ad5d536b00cd7a032c2e1d603e
|
Services/ServiceLegacyMavenProxy/src/main/java/fr/synchrotron/soleil/ica/ci/service/legacymavenproxy/HttpArtifactCaller.java
|
Services/ServiceLegacyMavenProxy/src/main/java/fr/synchrotron/soleil/ica/ci/service/legacymavenproxy/HttpArtifactCaller.java
|
package fr.synchrotron.soleil.ica.ci.service.legacymavenproxy;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.http.HttpClient;
import org.vertx.java.core.http.HttpServerRequest;
/**
* @author Gregory Boissinot
*/
public class HttpArtifactCaller {
private final Vertx vertx;
private final String repoHost;
private final int repoPort;
private final String repoURIPath;
public HttpArtifactCaller(Vertx vertx,
String repoHost, int repoPort, String repoURIPath) {
this.vertx = vertx;
this.repoHost = repoHost;
this.repoPort = repoPort;
this.repoURIPath = repoURIPath;
}
public Vertx getVertx() {
return vertx;
}
public String getRepoHost() {
return repoHost;
}
public String buildRequestPath(final HttpServerRequest request) {
final String prefix = "/maven";
String artifactPath = request.path().substring(prefix.length() + 1);
return repoURIPath.endsWith("/") ? (repoURIPath + artifactPath) : (repoURIPath + "/" + artifactPath);
}
public HttpClient getPClient() {
return getVertx().createHttpClient()
.setHost(repoHost)
.setPort(repoPort)
.setConnectTimeout(10000);
}
}
|
package fr.synchrotron.soleil.ica.ci.service.legacymavenproxy;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.http.HttpClient;
import org.vertx.java.core.http.HttpServerRequest;
/**
* @author Gregory Boissinot
*/
public class HttpArtifactCaller {
private final Vertx vertx;
private final String repoHost;
private final int repoPort;
private final String repoURIPath;
public HttpArtifactCaller(Vertx vertx,
String repoHost, int repoPort, String repoURIPath) {
this.vertx = vertx;
this.repoHost = repoHost;
this.repoPort = repoPort;
this.repoURIPath = repoURIPath;
}
public Vertx getVertx() {
return vertx;
}
public String getRepoHost() {
return repoHost;
}
public String buildRequestPath(final HttpServerRequest request) {
final String prefix = HttpArtifactProxyEndpointVerticle.PROXY_PATH;
String artifactPath = request.path().substring(prefix.length() + 1);
return repoURIPath.endsWith("/") ? (repoURIPath + artifactPath) : (repoURIPath + "/" + artifactPath);
}
public HttpClient getPClient() {
return getVertx().createHttpClient()
.setHost(repoHost)
.setPort(repoPort)
.setConnectTimeout(10000);
}
}
|
Fix regression Use the proxy path
|
Fix regression
Use the proxy path
|
Java
|
mit
|
synchrotron-soleil-ica/continuous-materials,synchrotron-soleil-ica/continuous-materials,synchrotron-soleil-ica/continuous-materials
|
java
|
## Code Before:
package fr.synchrotron.soleil.ica.ci.service.legacymavenproxy;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.http.HttpClient;
import org.vertx.java.core.http.HttpServerRequest;
/**
* @author Gregory Boissinot
*/
public class HttpArtifactCaller {
private final Vertx vertx;
private final String repoHost;
private final int repoPort;
private final String repoURIPath;
public HttpArtifactCaller(Vertx vertx,
String repoHost, int repoPort, String repoURIPath) {
this.vertx = vertx;
this.repoHost = repoHost;
this.repoPort = repoPort;
this.repoURIPath = repoURIPath;
}
public Vertx getVertx() {
return vertx;
}
public String getRepoHost() {
return repoHost;
}
public String buildRequestPath(final HttpServerRequest request) {
final String prefix = "/maven";
String artifactPath = request.path().substring(prefix.length() + 1);
return repoURIPath.endsWith("/") ? (repoURIPath + artifactPath) : (repoURIPath + "/" + artifactPath);
}
public HttpClient getPClient() {
return getVertx().createHttpClient()
.setHost(repoHost)
.setPort(repoPort)
.setConnectTimeout(10000);
}
}
## Instruction:
Fix regression
Use the proxy path
## Code After:
package fr.synchrotron.soleil.ica.ci.service.legacymavenproxy;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.http.HttpClient;
import org.vertx.java.core.http.HttpServerRequest;
/**
* @author Gregory Boissinot
*/
public class HttpArtifactCaller {
private final Vertx vertx;
private final String repoHost;
private final int repoPort;
private final String repoURIPath;
public HttpArtifactCaller(Vertx vertx,
String repoHost, int repoPort, String repoURIPath) {
this.vertx = vertx;
this.repoHost = repoHost;
this.repoPort = repoPort;
this.repoURIPath = repoURIPath;
}
public Vertx getVertx() {
return vertx;
}
public String getRepoHost() {
return repoHost;
}
public String buildRequestPath(final HttpServerRequest request) {
final String prefix = HttpArtifactProxyEndpointVerticle.PROXY_PATH;
String artifactPath = request.path().substring(prefix.length() + 1);
return repoURIPath.endsWith("/") ? (repoURIPath + artifactPath) : (repoURIPath + "/" + artifactPath);
}
public HttpClient getPClient() {
return getVertx().createHttpClient()
.setHost(repoHost)
.setPort(repoPort)
.setConnectTimeout(10000);
}
}
|
# ... existing code ...
public String buildRequestPath(final HttpServerRequest request) {
final String prefix = HttpArtifactProxyEndpointVerticle.PROXY_PATH;
String artifactPath = request.path().substring(prefix.length() + 1);
return repoURIPath.endsWith("/") ? (repoURIPath + artifactPath) : (repoURIPath + "/" + artifactPath);
}
# ... rest of the code ...
|
693dc9d8448740e1a1c4543cc3a91e3769fa7a3e
|
pySPM/utils/plot.py
|
pySPM/utils/plot.py
|
import numpy as np
import matplotlib.pyplot as plt
def plotMask(ax, mask, color, **kargs):
import copy
m = np.ma.masked_array(mask, ~mask)
palette = copy.copy(plt.cm.gray)
palette.set_over(color, 1.0)
ax.imshow(m, cmap=palette, vmin=0, vmax=0.5, **kargs)
def Xdist(ax,left, right, y, color='r', linestyle=':', fmt='.2f', xtransf=lambda x: x, **kargs):
ax.axvline(left,color=color, linestyle=linestyle)
ax.axvline(right,color=color, linestyle=linestyle)
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
|
import numpy as np
import matplotlib.pyplot as plt
def plotMask(ax, mask, color, **kargs):
import copy
m = np.ma.masked_array(mask, ~mask)
palette = copy.copy(plt.cm.gray)
palette.set_over(color, 1.0)
ax.imshow(m, cmap=palette, vmin=0, vmax=0.5, **kargs)
def Xdist(ax,left, right, y, color='r', linestyle=':', fmt='.2f', xtransf=lambda x: x, **kargs):
ax.axvline(left,color=color, linestyle=linestyle)
ax.axvline(right,color=color, linestyle=linestyle)
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
def DualPlot(ax, col1='C0',col2='C1'):
axb = ax.twinx()
axb.spines['left'].set_color(col1)
axb.spines['right'].set_color(col2)
ax.yaxis.label.set_color(col1)
axb.yaxis.label.set_color(col2)
ax.tick_params(axis='y', colors=col1)
axb.tick_params(axis='y', colors=col2)
return axb
|
Add helper function to create a DualPlot
|
Add helper function to create a DualPlot
|
Python
|
apache-2.0
|
scholi/pySPM
|
python
|
## Code Before:
import numpy as np
import matplotlib.pyplot as plt
def plotMask(ax, mask, color, **kargs):
import copy
m = np.ma.masked_array(mask, ~mask)
palette = copy.copy(plt.cm.gray)
palette.set_over(color, 1.0)
ax.imshow(m, cmap=palette, vmin=0, vmax=0.5, **kargs)
def Xdist(ax,left, right, y, color='r', linestyle=':', fmt='.2f', xtransf=lambda x: x, **kargs):
ax.axvline(left,color=color, linestyle=linestyle)
ax.axvline(right,color=color, linestyle=linestyle)
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
## Instruction:
Add helper function to create a DualPlot
## Code After:
import numpy as np
import matplotlib.pyplot as plt
def plotMask(ax, mask, color, **kargs):
import copy
m = np.ma.masked_array(mask, ~mask)
palette = copy.copy(plt.cm.gray)
palette.set_over(color, 1.0)
ax.imshow(m, cmap=palette, vmin=0, vmax=0.5, **kargs)
def Xdist(ax,left, right, y, color='r', linestyle=':', fmt='.2f', xtransf=lambda x: x, **kargs):
ax.axvline(left,color=color, linestyle=linestyle)
ax.axvline(right,color=color, linestyle=linestyle)
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
def DualPlot(ax, col1='C0',col2='C1'):
axb = ax.twinx()
axb.spines['left'].set_color(col1)
axb.spines['right'].set_color(col2)
ax.yaxis.label.set_color(col1)
axb.yaxis.label.set_color(col2)
ax.tick_params(axis='y', colors=col1)
axb.tick_params(axis='y', colors=col2)
return axb
|
# ... existing code ...
s = "{:"+fmt+"}"+kargs.get('unit','')
ax.annotate(s.format(xtransf(right-left)),(.5*(left+right),y),(0,2),textcoords='offset pixels',va='bottom',ha='center')
ax.annotate("",(left,y),(right,y),arrowprops=dict(arrowstyle=kargs.get('arrowstyle','<->')))
def DualPlot(ax, col1='C0',col2='C1'):
axb = ax.twinx()
axb.spines['left'].set_color(col1)
axb.spines['right'].set_color(col2)
ax.yaxis.label.set_color(col1)
axb.yaxis.label.set_color(col2)
ax.tick_params(axis='y', colors=col1)
axb.tick_params(axis='y', colors=col2)
return axb
# ... rest of the code ...
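For reference, a brief usage sketch of the DualPlot helper added above; the import path is assumed from the file location and the plotted data is made up for illustration.

import numpy as np
import matplotlib.pyplot as plt
from pySPM.utils.plot import DualPlot  # import path assumed from pySPM/utils/plot.py

x = np.linspace(0, 10, 200)
fig, ax = plt.subplots()
ax.plot(x, np.sin(x), color='C0')
ax.set_ylabel('height')
axb = DualPlot(ax)  # twinx() axis with spine, label and tick colors matched to C0/C1
axb.plot(x, np.gradient(np.sin(x), x), color='C1')
axb.set_ylabel('slope')
plt.show()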
|
0d86b2eed6d0ec61a004aa3a6d13943d0f0b8a54
|
kernel/src/stdlib/stdio.h
|
kernel/src/stdlib/stdio.h
|
// File: stdlib/stdio.h
// Author: vodozhaba
// Created on: Aug 21, 2016
// Purpose: Provides standard I/O functions.
#pragma once
#include <stdarg.h>
#include "io/disk/file.h"
extern FileDescriptor* stdout;
extern FileDescriptor* stderr;
size_t StdoutWriteOp(FileDescriptor* file, size_t size, const void* buf);
size_t StderrWriteOp(FileDescriptor* file, size_t size, const void* buf);
int putchar(int character);
int _puts(const char* s);
int isspace (int c);
int fprintf(FileDescriptor* file, const char* fmt, ...);
int printf(const char* fmt, ...);
int sprintf(char* dest, const char* fmt, ...);
|
// File: stdlib/stdio.h
// Author: vodozhaba
// Created on: Aug 21, 2016
// Purpose: Provides standard I/O functions.
#pragma once
#include <stdarg.h>
#include "io/disk/file.h"
extern FileDescriptor* stdout;
extern FileDescriptor* stderr;
size_t StdoutWriteOp(FileDescriptor* file, size_t size, const void* buf);
size_t StderrWriteOp(FileDescriptor* file, size_t size, const void* buf);
int putchar(int character);
int _puts(const char* s);
int isspace (int c);
int fprintf(FileDescriptor* file, const char* fmt, ...);
int vfprintf(FileDescriptor* file, const char* fmt, va_list args);
int printf(const char* fmt, ...);
int sprintf(char* dest, const char* fmt, ...);
|
Add vfprintf to the public interface just in case
|
Add vfprintf to the public interface just in case
|
C
|
mit
|
vodozhaba/VV4OS,velikiyv4/VV4OS,vodozhaba/VV4OS,velikiyv4/VV4OS
|
c
|
## Code Before:
// File: stdlib/stdio.h
// Author: vodozhaba
// Created on: Aug 21, 2016
// Purpose: Provides standard I/O functions.
#pragma once
#include <stdarg.h>
#include "io/disk/file.h"
extern FileDescriptor* stdout;
extern FileDescriptor* stderr;
size_t StdoutWriteOp(FileDescriptor* file, size_t size, const void* buf);
size_t StderrWriteOp(FileDescriptor* file, size_t size, const void* buf);
int putchar(int character);
int _puts(const char* s);
int isspace (int c);
int fprintf(FileDescriptor* file, const char* fmt, ...);
int printf(const char* fmt, ...);
int sprintf(char* dest, const char* fmt, ...);
## Instruction:
Add vfprintf to the public interface just in case
## Code After:
// File: stdlib/stdio.h
// Author: vodozhaba
// Created on: Aug 21, 2016
// Purpose: Provides standard I/O functions.
#pragma once
#include <stdarg.h>
#include "io/disk/file.h"
extern FileDescriptor* stdout;
extern FileDescriptor* stderr;
size_t StdoutWriteOp(FileDescriptor* file, size_t size, const void* buf);
size_t StderrWriteOp(FileDescriptor* file, size_t size, const void* buf);
int putchar(int character);
int _puts(const char* s);
int isspace (int c);
int fprintf(FileDescriptor* file, const char* fmt, ...);
int vfprintf(FileDescriptor* file, const char* fmt, va_list args);
int printf(const char* fmt, ...);
int sprintf(char* dest, const char* fmt, ...);
|
# ... existing code ...
int _puts(const char* s);
int isspace (int c);
int fprintf(FileDescriptor* file, const char* fmt, ...);
int vfprintf(FileDescriptor* file, const char* fmt, va_list args);
int printf(const char* fmt, ...);
int sprintf(char* dest, const char* fmt, ...);
# ... rest of the code ...
|
0c0a1d0ec480c7df9dd8821d40af7791e46db453
|
tests/lib/test_finance.py
|
tests/lib/test_finance.py
|
from tests import OldPythonTestCase
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester
from pycroft.lib.config import get,config
from pycroft.model.finance import FinanceAccount
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date
class Test_010_Semester(OldPythonTestCase):
def test_0010_create_semester_accounts(self):
"""
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName", 2500, 1500, date(2013, 9, 1), date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
for new_account in new_semester.accounts:
if(new_account.tag == account["tag"]):
new_account_equivalent = new_account
compare_account = FinanceAccount(type=account["type"],name=account["name"],semester=new_semester,tag=account["tag"])
self.assertEqual(new_account_equivalent.name, compare_account.name)
self.assertEqual(new_account_equivalent.type, compare_account.type)
|
from tests import OldPythonTestCase
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester, import_csv
from pycroft.lib.config import get, config
from pycroft.model.finance import FinanceAccount, Journal, JournalEntry
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date, datetime
class Test_010_Semester(OldPythonTestCase):
def test_0010_create_semester_accounts(self):
"""
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName",
2500, 1500,
date(2013, 9, 1),
date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
new_created_account = FinanceAccount.q.filter(
FinanceAccount.semester == new_semester,
FinanceAccount.tag == account["tag"]).first()
self.assertEqual(new_created_account.name, account["name"])
self.assertEqual(new_created_account.type, account["type"])
session.session.commit()
|
Fix for wrong test: create_semester_accounts
|
Fix for wrong test: create_semester_accounts
refs #448
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
python
|
## Code Before:
from tests import OldPythonTestCase
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester
from pycroft.lib.config import get,config
from pycroft.model.finance import FinanceAccount
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date
class Test_010_Semester(OldPythonTestCase):
def test_0010_create_semester_accounts(self):
"""
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName", 2500, 1500, date(2013, 9, 1), date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
for new_account in new_semester.accounts:
if(new_account.tag == account["tag"]):
new_account_equivalent = new_account
compare_account = FinanceAccount(type=account["type"],name=account["name"],semester=new_semester,tag=account["tag"])
self.assertEqual(new_account_equivalent.name, compare_account.name)
self.assertEqual(new_account_equivalent.type, compare_account.type)
## Instruction:
Fix for wrong test: create_semester_accounts
refs #448
## Code After:
from tests import OldPythonTestCase
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester, import_csv
from pycroft.lib.config import get, config
from pycroft.model.finance import FinanceAccount, Journal, JournalEntry
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date, datetime
class Test_010_Semester(OldPythonTestCase):
def test_0010_create_semester_accounts(self):
"""
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName",
2500, 1500,
date(2013, 9, 1),
date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
new_created_account = FinanceAccount.q.filter(
FinanceAccount.semester == new_semester,
FinanceAccount.tag == account["tag"]).first()
self.assertEqual(new_created_account.name, account["name"])
self.assertEqual(new_created_account.type, account["type"])
session.session.commit()
|
// ... existing code ...
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester, import_csv
from pycroft.lib.config import get, config
from pycroft.model.finance import FinanceAccount, Journal, JournalEntry
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date, datetime
class Test_010_Semester(OldPythonTestCase):
// ... modified code ...
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName",
2500, 1500,
date(2013, 9, 1),
date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
new_created_account = FinanceAccount.q.filter(
FinanceAccount.semester == new_semester,
FinanceAccount.tag == account["tag"]).first()
self.assertEqual(new_created_account.name, account["name"])
self.assertEqual(new_created_account.type, account["type"])
session.session.commit()
// ... rest of the code ...
|
f7611e37ef1e0dfaa568515be365d50b3edbd11c
|
ccdproc/conftest.py
|
ccdproc/conftest.py
|
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
Fix plugin import for astropy 2.x
|
Fix plugin import for astropy 2.x
|
Python
|
bsd-3-clause
|
astropy/ccdproc,mwcraig/ccdproc
|
python
|
## Code Before:
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
## Instruction:
Fix plugin import for astropy 2.x
## Code After:
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
...
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
...
|
9275bc3a9373e453e06f0aebc883a773dfb97627
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from tiddlywebwiki import __version__ as VERSION
setup(
name = 'tiddlywebwiki',
version = VERSION,
description = 'A TiddlyWeb plugin to provide a multi-user TiddlyWiki environment.',
author = 'FND',
author_email = '[email protected]',
packages = find_packages(exclude=['test']),
scripts = ['twinstance'],
platforms = 'Posix; MacOS X; Windows',
install_requires = [
'tiddlyweb>=0.9.96',
'tiddlywebplugins.wikklytextrender',
'tiddlywebplugins.status>=0.5',
'tiddlywebplugins.differ',
'tiddlywebplugins.atom',
'tiddlywebplugins.twimport',
'tiddlywebplugins.utils',
'tiddlywebplugins.instancer>=0.5.5',
'BeautifulSoup',
'wikklytext'],
include_package_data = True,
zip_safe = False
)
|
from setuptools import setup, find_packages
from tiddlywebwiki import __version__ as VERSION
setup(
name = 'tiddlywebwiki',
version = VERSION,
description = 'A TiddlyWeb plugin to provide a multi-user TiddlyWiki environment.',
author = 'FND',
author_email = '[email protected]',
packages = find_packages(exclude=['test']),
scripts = ['twinstance'],
platforms = 'Posix; MacOS X; Windows',
install_requires = [
'tiddlyweb>=0.9.96',
'tiddlywebplugins.wikklytextrender',
'tiddlywebplugins.status>=0.5',
'tiddlywebplugins.differ',
'tiddlywebplugins.atom',
'tiddlywebplugins.twimport',
'tiddlywebplugins.utils',
'tiddlywebplugins.instancer>=0.5.5',
'wikklytext'],
include_package_data = True,
zip_safe = False
)
|
Remove BeautifulSoup from direct dependency list.
|
Remove BeautifulSoup from direct dependency list.
|
Python
|
bsd-3-clause
|
tiddlyweb/tiddlywebwiki,tiddlyweb/tiddlywebwiki,tiddlyweb/tiddlywebwiki
|
python
|
## Code Before:
from setuptools import setup, find_packages
from tiddlywebwiki import __version__ as VERSION
setup(
name = 'tiddlywebwiki',
version = VERSION,
description = 'A TiddlyWeb plugin to provide a multi-user TiddlyWiki environment.',
author = 'FND',
author_email = '[email protected]',
packages = find_packages(exclude=['test']),
scripts = ['twinstance'],
platforms = 'Posix; MacOS X; Windows',
install_requires = [
'tiddlyweb>=0.9.96',
'tiddlywebplugins.wikklytextrender',
'tiddlywebplugins.status>=0.5',
'tiddlywebplugins.differ',
'tiddlywebplugins.atom',
'tiddlywebplugins.twimport',
'tiddlywebplugins.utils',
'tiddlywebplugins.instancer>=0.5.5',
'BeautifulSoup',
'wikklytext'],
include_package_data = True,
zip_safe = False
)
## Instruction:
Remove BeautifulSoup from direct dependency list.
## Code After:
from setuptools import setup, find_packages
from tiddlywebwiki import __version__ as VERSION
setup(
name = 'tiddlywebwiki',
version = VERSION,
description = 'A TiddlyWeb plugin to provide a multi-user TiddlyWiki environment.',
author = 'FND',
author_email = '[email protected]',
packages = find_packages(exclude=['test']),
scripts = ['twinstance'],
platforms = 'Posix; MacOS X; Windows',
install_requires = [
'tiddlyweb>=0.9.96',
'tiddlywebplugins.wikklytextrender',
'tiddlywebplugins.status>=0.5',
'tiddlywebplugins.differ',
'tiddlywebplugins.atom',
'tiddlywebplugins.twimport',
'tiddlywebplugins.utils',
'tiddlywebplugins.instancer>=0.5.5',
'wikklytext'],
include_package_data = True,
zip_safe = False
)
|
# ... existing code ...
'tiddlywebplugins.twimport',
'tiddlywebplugins.utils',
'tiddlywebplugins.instancer>=0.5.5',
'wikklytext'],
include_package_data = True,
zip_safe = False
# ... rest of the code ...
|
f4cfad2edaa896b471f4f44b2a3fda2bd6b1bb49
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
from flask import Flask, jsonify
@pytest.fixture
def app():
app = Flask(__name__)
@app.route('/ping')
def ping():
return jsonify(ping='pong')
return app
|
import pytest
from flask import Flask, jsonify
@pytest.fixture
def app():
app = Flask(__name__)
@app.route('/')
def index():
return app.response_class('OK')
@app.route('/ping')
def ping():
return jsonify(ping='pong')
return app
|
Add index route to test application
|
Add index route to test application
This endpoint is used to start a :class:`LiveServer` instance with a
minimum waiting timeout.
|
Python
|
mit
|
amateja/pytest-flask
|
python
|
## Code Before:
import pytest
from flask import Flask, jsonify
@pytest.fixture
def app():
app = Flask(__name__)
@app.route('/ping')
def ping():
return jsonify(ping='pong')
return app
## Instruction:
Add index route to test application
This endpoint is used to start a :class:`LiveServer` instance with a
minimum waiting timeout.
## Code After:
import pytest
from flask import Flask, jsonify
@pytest.fixture
def app():
app = Flask(__name__)
@app.route('/')
def index():
return app.response_class('OK')
@app.route('/ping')
def ping():
return jsonify(ping='pong')
return app
|
...
def app():
app = Flask(__name__)
@app.route('/')
def index():
return app.response_class('OK')
@app.route('/ping')
def ping():
return jsonify(ping='pong')
...
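For context, a rough sketch of why a root route helps: a live-server fixture can poll '/' until the application answers instead of sleeping for a fixed timeout. The polling helper below is only an illustration of the idea, not pytest-flask's actual implementation.

import time
from urllib.request import urlopen
from urllib.error import URLError

def wait_until_ready(url, timeout=5.0, interval=0.1):
    # Poll the index endpoint until it responds with 200, or give up after `timeout` seconds.
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            if urlopen(url, timeout=interval).getcode() == 200:
                return True
        except URLError:
            time.sleep(interval)
    return False

# e.g. wait_until_ready('http://localhost:5000/')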
|
0c6bf8eca2d4cfd08cc98df3cb0ab706a6fbf7a2
|
cxfreeze-setup.py
|
cxfreeze-setup.py
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
Fix a TypeError when loading the mergeTool setting
|
Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"
|
Python
|
apache-2.0
|
timxx/gitc,timxx/gitc
|
python
|
## Code Before:
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
## Instruction:
Fix a TypeError when loading the mergeTool setting
cx-freeze doesn't include the pickle module on the Windows platform,
which causes "TypeError: unable to convert a C++ 'QVariantList'
instance to a Python object"
## Code After:
import sys
from glob import glob
from cx_Freeze import setup, Executable
from version import VERSION
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
for qm in glob("data/translations/*.qm"):
includeFiles.append((qm, qm))
buildOptions = dict(
packages=[],
excludes=excludes,
includes=includes,
include_files=includeFiles,
include_msvcr=True,
silent=True)
base = None
icon = None
if sys.platform == "win32":
base = "Win32GUI"
icon = "data/icons/gitc.ico"
executables = [
Executable('gitc',
base=base,
icon=icon
)]
setup(name='gitc',
version=VERSION,
description='A file conflict viewer for git',
options=dict(build_exe=buildOptions),
executables=executables,
)
|
...
# Dependencies are automatically detected, but it might need
# fine tuning.
excludes = ["Tkinter"]
includes = ["logview", "colorwidget", "pickle"]
includeFiles = [("data/icons/gitc.svg", "data/icons/gitc.svg"),
("data/licenses/Apache-2.0.html", "data/licenses/Apache-2.0.html"),
("LICENSE", "data/licenses/LICENSE")]
...
|
40dc1250bf73b54dfcf04c7a82c452a731aa363c
|
tests/unit/blocks/test_two_column_layout_block.py
|
tests/unit/blocks/test_two_column_layout_block.py
|
import mock
from django.test import TestCase
from django.template import RequestContext
from fancypages.models import Container
from fancypages.models.blocks import TwoColumnLayoutBlock
from fancypages.test import factories
class TestTwoColumnLayoutBlock(TestCase):
def setUp(self):
super(TestTwoColumnLayoutBlock, self).setUp()
self.user = factories.UserFactory.build()
self.request_context = RequestContext(mock.MagicMock())
self.request_context['user'] = self.user
def test_generates_two_empty_containers_when_rendered(self):
container = Container.objects.create(name='test-container')
block = TwoColumnLayoutBlock.objects.create(container=container)
self.assertEquals(block.containers.count(), 0)
renderer = block.get_renderer_class()(block, self.request_context)
block_html = renderer.render()
self.assertEquals(block.containers.count(), 2)
|
import mock
from django.test import TestCase
from django.template import RequestContext
from fancypages.models import Container
from fancypages.models.blocks import TwoColumnLayoutBlock
from fancypages.test import factories
class TestTwoColumnLayoutBlock(TestCase):
def setUp(self):
super(TestTwoColumnLayoutBlock, self).setUp()
self.user = factories.UserFactory.build()
self.request = mock.Mock()
self.request.META = {}
self.request_context = RequestContext(self.request, {})
self.request_context['user'] = self.user
def test_generates_two_empty_containers_when_rendered(self):
container = Container.objects.create(name='test-container')
block = TwoColumnLayoutBlock.objects.create(container=container)
self.assertEquals(block.containers.count(), 0)
renderer = block.get_renderer_class()(block, self.request_context)
renderer.render()
self.assertEquals(block.containers.count(), 2)
|
Fix mock of request for block rendering
|
Fix mock of request for block rendering
|
Python
|
bsd-3-clause
|
socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages
|
python
|
## Code Before:
import mock
from django.test import TestCase
from django.template import RequestContext
from fancypages.models import Container
from fancypages.models.blocks import TwoColumnLayoutBlock
from fancypages.test import factories
class TestTwoColumnLayoutBlock(TestCase):
def setUp(self):
super(TestTwoColumnLayoutBlock, self).setUp()
self.user = factories.UserFactory.build()
self.request_context = RequestContext(mock.MagicMock())
self.request_context['user'] = self.user
def test_generates_two_empty_containers_when_rendered(self):
container = Container.objects.create(name='test-container')
block = TwoColumnLayoutBlock.objects.create(container=container)
self.assertEquals(block.containers.count(), 0)
renderer = block.get_renderer_class()(block, self.request_context)
block_html = renderer.render()
self.assertEquals(block.containers.count(), 2)
## Instruction:
Fix mock of request for block rendering
## Code After:
import mock
from django.test import TestCase
from django.template import RequestContext
from fancypages.models import Container
from fancypages.models.blocks import TwoColumnLayoutBlock
from fancypages.test import factories
class TestTwoColumnLayoutBlock(TestCase):
def setUp(self):
super(TestTwoColumnLayoutBlock, self).setUp()
self.user = factories.UserFactory.build()
self.request = mock.Mock()
self.request.META = {}
self.request_context = RequestContext(self.request, {})
self.request_context['user'] = self.user
def test_generates_two_empty_containers_when_rendered(self):
container = Container.objects.create(name='test-container')
block = TwoColumnLayoutBlock.objects.create(container=container)
self.assertEquals(block.containers.count(), 0)
renderer = block.get_renderer_class()(block, self.request_context)
renderer.render()
self.assertEquals(block.containers.count(), 2)
|
// ... existing code ...
def setUp(self):
super(TestTwoColumnLayoutBlock, self).setUp()
self.user = factories.UserFactory.build()
self.request = mock.Mock()
self.request.META = {}
self.request_context = RequestContext(self.request, {})
self.request_context['user'] = self.user
def test_generates_two_empty_containers_when_rendered(self):
// ... modified code ...
self.assertEquals(block.containers.count(), 0)
renderer = block.get_renderer_class()(block, self.request_context)
renderer.render()
self.assertEquals(block.containers.count(), 2)
// ... rest of the code ...
|
711b144ed5c1bb22def8f0d7a46eb58a0bbc9bb6
|
test/tools/llvm-symbolizer/print_context.c
|
test/tools/llvm-symbolizer/print_context.c
|
// REQUIRES: x86_64-linux
// RUN: %host_cc -O0 -g %s -o %t 2>&1
// RUN: %t 2>&1 | llvm-symbolizer -print-source-context-lines=5 -obj=%t | FileCheck %s
//
// See PR31870 for more details on the XFAIL
// XFAIL: avr
#include <stdio.h>
int inc(int a) {
return a + 1;
}
int main() {
printf("%p\n", inc);
return 0;
}
// CHECK: inc
// CHECK: print_context.c:7
// CHECK: 5 : #include
// CHECK: 6 :
// CHECK: 7 >: int inc
// CHECK: 8 : return
// CHECK: 9 : }
|
// REQUIRES: x86_64-linux
// RUN: %host_cc -O0 -g %s -o %t 2>&1
// RUN: %t 2>&1 | llvm-symbolizer -print-source-context-lines=5 -obj=%t | FileCheck %s
#include <stdio.h>
int inc(int a) {
return a + 1;
}
int main() {
printf("%p\n", inc);
return 0;
}
// CHECK: inc
// CHECK: print_context.c:7
// CHECK: 5 : #include
// CHECK: 6 :
// CHECK: 7 >: int inc
// CHECK: 8 : return
// CHECK: 9 : }
|
Revert "[AVR] Mark a failing symbolizer test as XFAIL"
|
Revert "[AVR] Mark a failing symbolizer test as XFAIL"
This reverts commit 83a0e876349adb646ba858eb177b22b0b4bfc59a.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@309515 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm
|
c
|
## Code Before:
// REQUIRES: x86_64-linux
// RUN: %host_cc -O0 -g %s -o %t 2>&1
// RUN: %t 2>&1 | llvm-symbolizer -print-source-context-lines=5 -obj=%t | FileCheck %s
//
// See PR31870 for more details on the XFAIL
// XFAIL: avr
#include <stdio.h>
int inc(int a) {
return a + 1;
}
int main() {
printf("%p\n", inc);
return 0;
}
// CHECK: inc
// CHECK: print_context.c:7
// CHECK: 5 : #include
// CHECK: 6 :
// CHECK: 7 >: int inc
// CHECK: 8 : return
// CHECK: 9 : }
## Instruction:
Revert "[AVR] Mark a failing symbolizer test as XFAIL"
This reverts commit 83a0e876349adb646ba858eb177b22b0b4bfc59a.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@309515 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// REQUIRES: x86_64-linux
// RUN: %host_cc -O0 -g %s -o %t 2>&1
// RUN: %t 2>&1 | llvm-symbolizer -print-source-context-lines=5 -obj=%t | FileCheck %s
#include <stdio.h>
int inc(int a) {
return a + 1;
}
int main() {
printf("%p\n", inc);
return 0;
}
// CHECK: inc
// CHECK: print_context.c:7
// CHECK: 5 : #include
// CHECK: 6 :
// CHECK: 7 >: int inc
// CHECK: 8 : return
// CHECK: 9 : }
|
...
// REQUIRES: x86_64-linux
// RUN: %host_cc -O0 -g %s -o %t 2>&1
// RUN: %t 2>&1 | llvm-symbolizer -print-source-context-lines=5 -obj=%t | FileCheck %s
#include <stdio.h>
...
|
9564692c1044779467e926f830b8f28e1661cb73
|
setup.py
|
setup.py
|
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='[email protected]',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='[email protected]',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
Remove per patch version classifiers
|
Remove per patch version classifiers
|
Python
|
mit
|
eerimoq/argparse_addons
|
python
|
## Code Before:
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='[email protected]',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
## Instruction:
Remove per patch version classifiers
## Code After:
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='[email protected]',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires='>=3.6',
install_requires=[
],
test_suite="tests")
|
# ... existing code ...
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
# ... rest of the code ...
|
20147b8b8a80ef8ab202d916bf1cdfb67d4753d3
|
SelfTests.py
|
SelfTests.py
|
import os
import unittest
from Logger import Logger
class TestLogger(unittest.TestCase):
def test_file_handling(self):
testLog = Logger("testLog")
## Check if program can create and open file
self.assertTrue(testLog.opened)
returns = testLog.close()
## Check if logger correctly signs bool OPENED and returns
## 0 as succes.
self.assertFalse(testLog.opened)
self.assertEqual(returns,0)
returns = testLog.close()
## Check if logger returns 1 when trying to close already
## closed file
self.assertEqual(returns,1)
## Do cleanup:
os.remove(testLog.name)
def test_logging(self):
testLog = Logger("testLog")
testLog.save_line("TestLine")
testLog.close()
logfile = open(testLog.name)
content = logfile.read()
logfile.close()
saved = content.split(" : ")
self.assertEqual(saved[1],"TestLine")
## cleanup
os.remove(testLog.name)
if __name__ == '__main__':
unittest.main()
|
import os
import unittest
from Logger import Logger
class TestLogger(unittest.TestCase):
def test_file_handling(self):
testLog = Logger("testLog")
## Check if program can create and open file
self.assertTrue(testLog.opened)
returns = testLog.close()
## Check if logger correctly signs bool OPENED and returns
## 0 as succes.
self.assertFalse(testLog.opened)
self.assertEqual(returns,0)
returns = testLog.close()
## Check if logger returns 1 when trying to close already
## closed file
self.assertEqual(returns,1)
## Do cleanup:
os.remove(testLog.name)
def test_logging(self):
testLog = Logger("testLog")
testPhrase = "TestLine\r\n"
testLog.save_line(testPhrase)
testLog.close()
logfile = open(testLog.name)
content = logfile.read()
logfile.close()
saved = content.split(" : ")
## Check if saved data corresponds
self.assertEqual(saved[1],testPhrase)
## cleanup
os.remove(testLog.name)
if __name__ == '__main__':
unittest.main()
|
Test of logger is testing a testPhrase instead of two manually written strings
|
Test of logger is testing a testPhrase instead of two manually written strings
Signed-off-by: TeaPackCZ <[email protected]>
|
Python
|
mit
|
TeaPackCZ/RobotZed,TeaPackCZ/RobotZed
|
python
|
## Code Before:
import os
import unittest
from Logger import Logger
class TestLogger(unittest.TestCase):
def test_file_handling(self):
testLog = Logger("testLog")
## Check if program can create and open file
self.assertTrue(testLog.opened)
returns = testLog.close()
## Check if logger correctly signs bool OPENED and returns
## 0 as succes.
self.assertFalse(testLog.opened)
self.assertEqual(returns,0)
returns = testLog.close()
## Check if logger returns 1 when trying to close already
## closed file
self.assertEqual(returns,1)
## Do cleanup:
os.remove(testLog.name)
def test_logging(self):
testLog = Logger("testLog")
testLog.save_line("TestLine")
testLog.close()
logfile = open(testLog.name)
content = logfile.read()
logfile.close()
saved = content.split(" : ")
self.assertEqual(saved[1],"TestLine")
## cleanup
os.remove(testLog.name)
if __name__ == '__main__':
unittest.main()
## Instruction:
Test of logger is testing a testPhrase instead of two manually written strings
Signed-off-by: TeaPackCZ <[email protected]>
## Code After:
import os
import unittest
from Logger import Logger
class TestLogger(unittest.TestCase):
def test_file_handling(self):
testLog = Logger("testLog")
## Check if program can create and open file
self.assertTrue(testLog.opened)
returns = testLog.close()
## Check if logger correctly signs bool OPENED and returns
## 0 as succes.
self.assertFalse(testLog.opened)
self.assertEqual(returns,0)
returns = testLog.close()
## Check if logger returns 1 when trying to close already
## closed file
self.assertEqual(returns,1)
## Do cleanup:
os.remove(testLog.name)
def test_logging(self):
testLog = Logger("testLog")
testPhrase = "TestLine\r\n"
testLog.save_line(testPhrase)
testLog.close()
logfile = open(testLog.name)
content = logfile.read()
logfile.close()
saved = content.split(" : ")
## Check if saved data corresponds
self.assertEqual(saved[1],testPhrase)
## cleanup
os.remove(testLog.name)
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
def test_logging(self):
testLog = Logger("testLog")
testPhrase = "TestLine\r\n"
testLog.save_line(testPhrase)
testLog.close()
logfile = open(testLog.name)
content = logfile.read()
logfile.close()
saved = content.split(" : ")
## Check if saved data corresponds
self.assertEqual(saved[1],testPhrase)
## cleanup
os.remove(testLog.name)
// ... rest of the code ...
|
4058e2d2b3187c6893192de33cc2d04a6cadb2f5
|
src/main/ca/ubc/cs411/abe/If.java
|
src/main/ca/ubc/cs411/abe/If.java
|
package ca.ubc.cs411.abe;
public class If extends ABE {
private ABE pred, conseq, altern;
public If(ABE pred, ABE conseq, ABE altern) {
if (pred.typeOf() != Type.BOOL) {
throw new Error("If constructor passed a non-Bool predicate: " + pred);
} else if (conseq.typeOf() != altern.typeOf()) {
throw new Error("If constructor passed consequence and alternative expressions with different types: " + conseq + ", " + altern);
}
this.pred = pred;
this.conseq = conseq;
this.altern = altern;
}
@Override
public Value interp() {
if (pred.interp().toBool()) {
return conseq.interp();
}
return altern.interp();
}
@Override
public Type typeOf() {
// Invariant: conseq and altern have the same type
return conseq.typeOf();
}
@Override
public String toString() {
return "If(" + pred + "," + conseq + "," + altern + ")";
}
}
|
package ca.ubc.cs411.abe;
public class If extends ABE {
private ABE pred, conseq, altern;
public If(ABE pred, ABE conseq, ABE altern) {
this.pred = pred;
this.conseq = conseq;
this.altern = altern;
}
@Override
public Value interp() {
if (pred.interp().toBool()) {
return conseq.interp();
}
return altern.interp();
}
@Override
public Type typeOf() {
if (pred.typeOf() != Type.BOOL) {
throw new Error("If constructor passed a non-Bool predicate: " + pred);
} else if (conseq.typeOf() != altern.typeOf()) {
throw new Error("If constructor passed consequence and alternative expressions with different types: " + conseq + ", " + altern);
}
return conseq.typeOf();
}
@Override
public String toString() {
return "If(" + pred + "," + conseq + "," + altern + ")";
}
}
|
Allow any ABE expressions to be used for predicate/consequence/alternative expressions, only throw type checking errors in typeOf
|
Allow any ABE expressions to be used for predicate/consequence/alternative
expressions, only throw type checking errors in typeOf
|
Java
|
mit
|
msayson/cpsc411
|
java
|
## Code Before:
package ca.ubc.cs411.abe;
public class If extends ABE {
private ABE pred, conseq, altern;
public If(ABE pred, ABE conseq, ABE altern) {
if (pred.typeOf() != Type.BOOL) {
throw new Error("If constructor passed a non-Bool predicate: " + pred);
} else if (conseq.typeOf() != altern.typeOf()) {
throw new Error("If constructor passed consequence and alternative expressions with different types: " + conseq + ", " + altern);
}
this.pred = pred;
this.conseq = conseq;
this.altern = altern;
}
@Override
public Value interp() {
if (pred.interp().toBool()) {
return conseq.interp();
}
return altern.interp();
}
@Override
public Type typeOf() {
// Invariant: conseq and altern have the same type
return conseq.typeOf();
}
@Override
public String toString() {
return "If(" + pred + "," + conseq + "," + altern + ")";
}
}
## Instruction:
Allow any ABE expressions to be used for predicate/consequence/alternative
expressions, only throw type checking errors in typeOf
## Code After:
package ca.ubc.cs411.abe;
public class If extends ABE {
private ABE pred, conseq, altern;
public If(ABE pred, ABE conseq, ABE altern) {
this.pred = pred;
this.conseq = conseq;
this.altern = altern;
}
@Override
public Value interp() {
if (pred.interp().toBool()) {
return conseq.interp();
}
return altern.interp();
}
@Override
public Type typeOf() {
if (pred.typeOf() != Type.BOOL) {
throw new Error("If constructor passed a non-Bool predicate: " + pred);
} else if (conseq.typeOf() != altern.typeOf()) {
throw new Error("If constructor passed consequence and alternative expressions with different types: " + conseq + ", " + altern);
}
return conseq.typeOf();
}
@Override
public String toString() {
return "If(" + pred + "," + conseq + "," + altern + ")";
}
}
|
# ... existing code ...
private ABE pred, conseq, altern;
public If(ABE pred, ABE conseq, ABE altern) {
this.pred = pred;
this.conseq = conseq;
this.altern = altern;
# ... modified code ...
@Override
public Type typeOf() {
if (pred.typeOf() != Type.BOOL) {
throw new Error("If constructor passed a non-Bool predicate: " + pred);
} else if (conseq.typeOf() != altern.typeOf()) {
throw new Error("If constructor passed consequence and alternative expressions with different types: " + conseq + ", " + altern);
}
return conseq.typeOf();
}
# ... rest of the code ...
|
beeae2daf35da275d5f9e1ad01516c917319bf00
|
gapipy/resources/geo/state.py
|
gapipy/resources/geo/state.py
|
from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [('country', 'Country')]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
|
from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [
('country', 'Country'),
('place', 'Place'),
]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
|
Add Place reference to State model
|
Add Place reference to State model
|
Python
|
mit
|
gadventures/gapipy
|
python
|
## Code Before:
from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [('country', 'Country')]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
## Instruction:
Add Place reference to State model
## Code After:
from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [
('country', 'Country'),
('place', 'Place'),
]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
|
...
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [
('country', 'Country'),
('place', 'Place'),
]
@enforce_string_type
def __repr__(self):
...
|
851579b14a34b8acc1977b2f4d2c991d8e5f5f2c
|
ledlight.py
|
ledlight.py
|
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
|
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
|
Print out 0/1 values as we sense them real-time.
|
Print out 0/1 values as we sense them real-time.
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
python
|
## Code Before:
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.1
duration = 4
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
series.append((GPIO.input(pin_switch)) ^ 1)
sleep(period)
print "outputting"
print series
GPIO.cleanup()
## Instruction:
Print out 0/1 values as we sense them real-time.
## Code After:
import RPi.GPIO as GPIO
from time import sleep
pin_switch = 12
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
series = []
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
|
...
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin_switch, GPIO.IN)
period = 0.25
duration = 30
samples = int(duration / float(period))
freq = 1.0 / period
...
print "inputting", samples, "samples,", "at", freq, "Hz"
for i in range(samples):
print (GPIO.input(pin_switch)) ^ 1
sleep(period)
print
GPIO.cleanup()
...
|
a6a0ff97fb908e8c32387a9af89fe2a87fd00954
|
src/main/java/net/elprespufferfish/rssreader/DatabaseSchema.java
|
src/main/java/net/elprespufferfish/rssreader/DatabaseSchema.java
|
package net.elprespufferfish.rssreader;
import android.provider.BaseColumns;
public class DatabaseSchema {
public static interface FeedTable extends BaseColumns {
public static final String TABLE_NAME = "feeds";
public static final String FEED_NAME = "feed_name";
public static final String FEED_URL = "feed_url";
}
public static interface ArticleTable extends BaseColumns {
public static final String TABLE_NAME = "articles";
public static final String ARTICLE_FEED = "article_feed";
public static final String ARTICLE_NAME = "article_name";
public static final String ARTICLE_URL = "article_url";
public static final String ARTICLE_PUBLICATION_DATE = "article_pubdate";
public static final String ARTICLE_DESCRIPTION = "article_description";
public static final String ARTICLE_GUID = "article_guid";
public static final String ARTICLE_IS_READ = "article_is_read";
}
private DatabaseSchema() {
// prevent instantiations
}
}
|
package net.elprespufferfish.rssreader;
import android.provider.BaseColumns;
public class DatabaseSchema {
public interface FeedTable extends BaseColumns {
String TABLE_NAME = "feeds";
String FEED_NAME = "feed_name";
String FEED_URL = "feed_url";
}
public interface ArticleTable extends BaseColumns {
String TABLE_NAME = "articles";
String ARTICLE_FEED = "article_feed";
String ARTICLE_NAME = "article_name";
String ARTICLE_URL = "article_url";
String ARTICLE_PUBLICATION_DATE = "article_pubdate";
String ARTICLE_DESCRIPTION = "article_description";
String ARTICLE_GUID = "article_guid";
String ARTICLE_IS_READ = "article_is_read";
}
private DatabaseSchema() {
// prevent instantiations
}
}
|
Remove redundant modifiers for interface fields
|
Remove redundant modifiers for interface fields
|
Java
|
mit
|
elprespufferfish/rss-reader
|
java
|
## Code Before:
package net.elprespufferfish.rssreader;
import android.provider.BaseColumns;
public class DatabaseSchema {
public static interface FeedTable extends BaseColumns {
public static final String TABLE_NAME = "feeds";
public static final String FEED_NAME = "feed_name";
public static final String FEED_URL = "feed_url";
}
public static interface ArticleTable extends BaseColumns {
public static final String TABLE_NAME = "articles";
public static final String ARTICLE_FEED = "article_feed";
public static final String ARTICLE_NAME = "article_name";
public static final String ARTICLE_URL = "article_url";
public static final String ARTICLE_PUBLICATION_DATE = "article_pubdate";
public static final String ARTICLE_DESCRIPTION = "article_description";
public static final String ARTICLE_GUID = "article_guid";
public static final String ARTICLE_IS_READ = "article_is_read";
}
private DatabaseSchema() {
// prevent instantiations
}
}
## Instruction:
Remove redundant modifiers for interface fields
## Code After:
package net.elprespufferfish.rssreader;
import android.provider.BaseColumns;
public class DatabaseSchema {
public interface FeedTable extends BaseColumns {
String TABLE_NAME = "feeds";
String FEED_NAME = "feed_name";
String FEED_URL = "feed_url";
}
public interface ArticleTable extends BaseColumns {
String TABLE_NAME = "articles";
String ARTICLE_FEED = "article_feed";
String ARTICLE_NAME = "article_name";
String ARTICLE_URL = "article_url";
String ARTICLE_PUBLICATION_DATE = "article_pubdate";
String ARTICLE_DESCRIPTION = "article_description";
String ARTICLE_GUID = "article_guid";
String ARTICLE_IS_READ = "article_is_read";
}
private DatabaseSchema() {
// prevent instantiations
}
}
|
...
public class DatabaseSchema {
public interface FeedTable extends BaseColumns {
String TABLE_NAME = "feeds";
String FEED_NAME = "feed_name";
String FEED_URL = "feed_url";
}
public interface ArticleTable extends BaseColumns {
String TABLE_NAME = "articles";
String ARTICLE_FEED = "article_feed";
String ARTICLE_NAME = "article_name";
String ARTICLE_URL = "article_url";
String ARTICLE_PUBLICATION_DATE = "article_pubdate";
String ARTICLE_DESCRIPTION = "article_description";
String ARTICLE_GUID = "article_guid";
String ARTICLE_IS_READ = "article_is_read";
}
private DatabaseSchema() {
...
|
ea6a84cee4f452f4503c6ce0fdd04b77d9017bdd
|
tinyblog/management/commands/import_tinyblog.py
|
tinyblog/management/commands/import_tinyblog.py
|
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
|
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
|
Fix for bad URL schemes
|
Fix for bad URL schemes
|
Python
|
bsd-3-clause
|
dominicrodger/tinyblog,dominicrodger/tinyblog
|
python
|
## Code Before:
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
## Instruction:
Fix for bad URL schemes
## Code After:
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
|
// ... existing code ...
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
// ... modified code ...
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
// ... rest of the code ...
|
f02bb65b078b5c46d548ded6876b37eddda7adb1
|
Scheduler/src/main/java/Client.java
|
Scheduler/src/main/java/Client.java
|
import rabbit.Receiver;
public class Client {
public static void main(String[] args) {
Receiver receiver = new Receiver();
receiver.startReceiving();
}
}
|
import dmon.core.commons.helpers.CommandLineArgumentParser;
import dmon.core.commons.helpers.PidManipulation;
import dmon.core.commons.helpers.ProgramArguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import rabbit.Receiver;
public class Client {
public static final Logger logger = LoggerFactory.getLogger(Client.class);
public static void main(String[] args) {
MDC.put("pid", PidManipulation.getPid());
logger.info("Scanner is started!");
new CommandLineArgumentParser(args).parse();
if (ProgramArguments.getPidFile() != null) {
PidManipulation.writeOwnPidToFile(ProgramArguments.getPidFile());
}
else {
PidManipulation.writeOwnPidToFile("../var/specs_monitoring_nmap_scheduler.pid");
}
Receiver receiver = new Receiver();
receiver.startReceiving();
MDC.clear();
}
}
|
Add argument parser to Scheduler component.
|
Add argument parser to Scheduler component.
|
Java
|
apache-2.0
|
IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring
|
java
|
## Code Before:
import rabbit.Receiver;
public class Client {
public static void main(String[] args) {
Receiver receiver = new Receiver();
receiver.startReceiving();
}
}
## Instruction:
Add argument parser to Scheduler component.
## Code After:
import dmon.core.commons.helpers.CommandLineArgumentParser;
import dmon.core.commons.helpers.PidManipulation;
import dmon.core.commons.helpers.ProgramArguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import rabbit.Receiver;
public class Client {
public static final Logger logger = LoggerFactory.getLogger(Client.class);
public static void main(String[] args) {
MDC.put("pid", PidManipulation.getPid());
logger.info("Scanner is started!");
new CommandLineArgumentParser(args).parse();
if (ProgramArguments.getPidFile() != null) {
PidManipulation.writeOwnPidToFile(ProgramArguments.getPidFile());
}
else {
PidManipulation.writeOwnPidToFile("../var/specs_monitoring_nmap_scheduler.pid");
}
Receiver receiver = new Receiver();
receiver.startReceiving();
MDC.clear();
}
}
|
# ... existing code ...
import dmon.core.commons.helpers.CommandLineArgumentParser;
import dmon.core.commons.helpers.PidManipulation;
import dmon.core.commons.helpers.ProgramArguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import rabbit.Receiver;
public class Client {
public static final Logger logger = LoggerFactory.getLogger(Client.class);
public static void main(String[] args) {
MDC.put("pid", PidManipulation.getPid());
logger.info("Scanner is started!");
new CommandLineArgumentParser(args).parse();
if (ProgramArguments.getPidFile() != null) {
PidManipulation.writeOwnPidToFile(ProgramArguments.getPidFile());
}
else {
PidManipulation.writeOwnPidToFile("../var/specs_monitoring_nmap_scheduler.pid");
}
Receiver receiver = new Receiver();
receiver.startReceiving();
MDC.clear();
}
}
# ... rest of the code ...
|
b70923c9dc77371c24b3d2183b0573969602eca0
|
Source/Pester_Prefix.h
|
Source/Pester_Prefix.h
|
//
// Prefix header for all source files of the 'Pester' target in the 'Pester' project
//
#import "NJROperatingSystemVersion.h"
|
//
// Prefix header for all source files of the 'Pester' target in the 'Pester' project
//
#ifdef __OBJC__
#import <Cocoa/Cocoa.h>
#import "NJROperatingSystemVersion.h"
#endif
|
Fix Pester prefix header to import Cocoa again.
|
Fix Pester prefix header to import Cocoa again.
|
C
|
bsd-2-clause
|
ssp/Pester,ssp/Pester,ssp/Pester,ssp/Pester,nriley/Pester,ssp/Pester,nriley/Pester
|
c
|
## Code Before:
//
// Prefix header for all source files of the 'Pester' target in the 'Pester' project
//
#import "NJROperatingSystemVersion.h"
## Instruction:
Fix Pester prefix header to import Cocoa again.
## Code After:
//
// Prefix header for all source files of the 'Pester' target in the 'Pester' project
//
#ifdef __OBJC__
#import <Cocoa/Cocoa.h>
#import "NJROperatingSystemVersion.h"
#endif
|
# ... existing code ...
// Prefix header for all source files of the 'Pester' target in the 'Pester' project
//
#ifdef __OBJC__
#import <Cocoa/Cocoa.h>
#import "NJROperatingSystemVersion.h"
#endif
# ... rest of the code ...
|
1f03af4a3ceda754dc0196c49f295fc683bd6e5a
|
opps/core/cache/models.py
|
opps/core/cache/models.py
|
from django.db import models
class ModelCaching(models.Model):
pass
|
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
Create MetaCaching, ModelBase for core cache
|
Create MetaCaching, ModelBase for core cache
|
Python
|
mit
|
YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps
|
python
|
## Code Before:
from django.db import models
class ModelCaching(models.Model):
pass
## Instruction:
Create MetaCaching, ModelBase for core cache
## Code After:
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
...
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
...
|
d84a4efcf880bb668b2721af3f4ce18220e8baab
|
xvistaprof/reader.py
|
xvistaprof/reader.py
|
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
|
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
|
Use np.genfromtxt to handle missing values
|
Use np.genfromtxt to handle missing values
|
Python
|
bsd-2-clause
|
jonathansick/xvistaprof
|
python
|
## Code Before:
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
## Instruction:
Use np.genfromtxt to handle missing values
## Code After:
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
|
// ... existing code ...
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
// ... rest of the code ...
|
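A minimal standalone sketch of the np.genfromtxt behaviour the commit above relies on: tokens listed in missing_values (here '*') are replaced by filling_values, so gaps become NaN. The column names and sample values below are invented for illustration and are not taken from the xvistaprof data.
import numpy as np
# Three rows of two columns; the second row is missing its 'SB' value.
lines = ["1.0 20.5", "2.0 *", "3.0 21.1"]
dt = [('R', float), ('SB', float)]
data = np.genfromtxt(lines, dtype=np.dtype(dt),
                     missing_values='*', filling_values=np.nan)
print(data['SB'])  # roughly: [20.5  nan  21.1]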
2ace9ce514d7299a8f3e8dca134a6e4eb3284937
|
parser2.py
|
parser2.py
|
from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
Move parsing loop into the class itself.
|
Move parsing loop into the class itself.
|
Python
|
mit
|
zimolzak/Ignition-poker-parser
|
python
|
## Code Before:
from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
## Instruction:
Move parsing loop into the class itself.
## Code After:
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
# ... existing code ...
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
# ... rest of the code ...
|
88426415053f44202596e8bd573ca2ca6c056e04
|
schwifty/registry.py
|
schwifty/registry.py
|
import json
from pkg_resources import resource_filename
_registry = {}
def has(name):
return name in _registry
def get(name):
if not has(name):
with open(resource_filename(__name__, name + '-registry.json'), 'r') as fp:
save(name, json.load(fp))
return _registry[name]
def save(name, data):
_registry[name] = data
def build_index(base_name, index_name, key, **predicate):
def make_key(entry):
return tuple(entry[k] for k in key) if isinstance(key, tuple) else entry[key]
def match(entry):
return all(entry[key] == value for key, value in predicate.items())
base = get(base_name)
save(index_name, dict((make_key(entry), entry) for entry in base if match(entry)))
def manipulate(name, func):
registry = get(name)
if isinstance(registry, dict):
for key, value in registry.items():
registry[key] = func(key, value)
elif isinstance(registry, list):
registry = [func(item) for item in registry]
save(name, registry)
|
import json
from collections import defaultdict
from pkg_resources import resource_filename
_registry = {}
def has(name):
return name in _registry
def get(name):
if not has(name):
with open(resource_filename(__name__, name + '-registry.json'), 'r') as fp:
save(name, json.load(fp))
return _registry[name]
def save(name, data):
_registry[name] = data
def build_index(base_name, index_name, key, accumulate=False, **predicate):
def make_key(entry):
return tuple(entry[k] for k in key) if isinstance(key, tuple) else entry[key]
def match(entry):
return all(entry[key] == value for key, value in predicate.items())
base = get(base_name)
if accumulate:
data = defaultdict(list)
for entry in base:
if not match(entry):
continue
data[make_key(entry)].append(entry)
else:
data = dict((make_key(entry), entry) for entry in base if match(entry))
save(index_name, data)
def manipulate(name, func):
registry = get(name)
if isinstance(registry, dict):
for key, value in registry.items():
registry[key] = func(key, value)
elif isinstance(registry, list):
registry = [func(item) for item in registry]
save(name, registry)
|
Allow index to accumulate values with same key
|
Allow index to accumulate values with same key
|
Python
|
mit
|
figo-connect/schwifty
|
python
|
## Code Before:
import json
from pkg_resources import resource_filename
_registry = {}
def has(name):
return name in _registry
def get(name):
if not has(name):
with open(resource_filename(__name__, name + '-registry.json'), 'r') as fp:
save(name, json.load(fp))
return _registry[name]
def save(name, data):
_registry[name] = data
def build_index(base_name, index_name, key, **predicate):
def make_key(entry):
return tuple(entry[k] for k in key) if isinstance(key, tuple) else entry[key]
def match(entry):
return all(entry[key] == value for key, value in predicate.items())
base = get(base_name)
save(index_name, dict((make_key(entry), entry) for entry in base if match(entry)))
def manipulate(name, func):
registry = get(name)
if isinstance(registry, dict):
for key, value in registry.items():
registry[key] = func(key, value)
elif isinstance(registry, list):
registry = [func(item) for item in registry]
save(name, registry)
## Instruction:
Allow index to accumulate values with same key
## Code After:
import json
from collections import defaultdict
from pkg_resources import resource_filename
_registry = {}
def has(name):
return name in _registry
def get(name):
if not has(name):
with open(resource_filename(__name__, name + '-registry.json'), 'r') as fp:
save(name, json.load(fp))
return _registry[name]
def save(name, data):
_registry[name] = data
def build_index(base_name, index_name, key, accumulate=False, **predicate):
def make_key(entry):
return tuple(entry[k] for k in key) if isinstance(key, tuple) else entry[key]
def match(entry):
return all(entry[key] == value for key, value in predicate.items())
base = get(base_name)
if accumulate:
data = defaultdict(list)
for entry in base:
if not match(entry):
continue
data[make_key(entry)].append(entry)
else:
data = dict((make_key(entry), entry) for entry in base if match(entry))
save(index_name, data)
def manipulate(name, func):
registry = get(name)
if isinstance(registry, dict):
for key, value in registry.items():
registry[key] = func(key, value)
elif isinstance(registry, list):
registry = [func(item) for item in registry]
save(name, registry)
|
# ... existing code ...
import json
from collections import defaultdict
from pkg_resources import resource_filename
# ... modified code ...
_registry[name] = data
def build_index(base_name, index_name, key, accumulate=False, **predicate):
def make_key(entry):
return tuple(entry[k] for k in key) if isinstance(key, tuple) else entry[key]
...
return all(entry[key] == value for key, value in predicate.items())
base = get(base_name)
if accumulate:
data = defaultdict(list)
for entry in base:
if not match(entry):
continue
data[make_key(entry)].append(entry)
else:
data = dict((make_key(entry), entry) for entry in base if match(entry))
save(index_name, data)
def manipulate(name, func):
# ... rest of the code ...
|
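A minimal sketch of what the new accumulate flag in build_index does: entries sharing a key are collected into a list instead of the last one overwriting the rest. The field names below ('country', 'name') are invented for illustration, not taken from the schwifty registry data.
from collections import defaultdict
entries = [
    {'country': 'DE', 'name': 'Bank A'},
    {'country': 'DE', 'name': 'Bank B'},
    {'country': 'AT', 'name': 'Bank C'},
]
index = defaultdict(list)
for entry in entries:
    index[entry['country']].append(entry)
# index['DE'] now holds both German entries, mirroring accumulate=True above.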
fa82d79f628ec9e45389b2a8e3f9a167e0fd1b8a
|
src/main/java/com/github/yuri0x7c1/ofbiz/explorer/common/ui/view/HomeView.java
|
src/main/java/com/github/yuri0x7c1/ofbiz/explorer/common/ui/view/HomeView.java
|
package com.github.yuri0x7c1.ofbiz.explorer.common.ui.view;
import org.vaadin.spring.sidebar.annotation.FontAwesomeIcon;
import org.vaadin.spring.sidebar.annotation.SideBarItem;
import com.github.yuri0x7c1.ofbiz.explorer.common.ui.sidebar.Sections;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.server.FontAwesome;
import com.vaadin.spring.annotation.SpringView;
import com.vaadin.ui.VerticalLayout;
@SpringView(name = "")
@SideBarItem(sectionId = Sections.VIEWS, caption = "Home", order = 0)
@FontAwesomeIcon(FontAwesome.HOME)
public class HomeView extends VerticalLayout implements View {
public HomeView() {
setSpacing(true);
setMargin(true);
}
@Override
public void enter(ViewChangeListener.ViewChangeEvent event) {
}
}
|
package com.github.yuri0x7c1.ofbiz.explorer.common.ui.view;
import org.vaadin.spring.sidebar.annotation.SideBarItem;
import org.vaadin.spring.sidebar.annotation.VaadinFontIcon;
import com.github.yuri0x7c1.ofbiz.explorer.common.ui.sidebar.Sections;
import com.vaadin.icons.VaadinIcons;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.spring.annotation.SpringView;
import com.vaadin.ui.VerticalLayout;
@SpringView(name = "")
@SideBarItem(sectionId = Sections.VIEWS, caption = "Home", order = 0)
@VaadinFontIcon(VaadinIcons.HOME)
public class HomeView extends VerticalLayout implements View {
public HomeView() {
setSpacing(true);
setMargin(true);
}
@Override
public void enter(ViewChangeListener.ViewChangeEvent event) {
}
}
|
Use vaadin icon instead of font awesome icon for home page
|
Use vaadin icon instead of font awesome icon for home page
|
Java
|
apache-2.0
|
yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer
|
java
|
## Code Before:
package com.github.yuri0x7c1.ofbiz.explorer.common.ui.view;
import org.vaadin.spring.sidebar.annotation.FontAwesomeIcon;
import org.vaadin.spring.sidebar.annotation.SideBarItem;
import com.github.yuri0x7c1.ofbiz.explorer.common.ui.sidebar.Sections;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.server.FontAwesome;
import com.vaadin.spring.annotation.SpringView;
import com.vaadin.ui.VerticalLayout;
@SpringView(name = "")
@SideBarItem(sectionId = Sections.VIEWS, caption = "Home", order = 0)
@FontAwesomeIcon(FontAwesome.HOME)
public class HomeView extends VerticalLayout implements View {
public HomeView() {
setSpacing(true);
setMargin(true);
}
@Override
public void enter(ViewChangeListener.ViewChangeEvent event) {
}
}
## Instruction:
Use vaadin icon instead of font awesome icon for home page
## Code After:
package com.github.yuri0x7c1.ofbiz.explorer.common.ui.view;
import org.vaadin.spring.sidebar.annotation.SideBarItem;
import org.vaadin.spring.sidebar.annotation.VaadinFontIcon;
import com.github.yuri0x7c1.ofbiz.explorer.common.ui.sidebar.Sections;
import com.vaadin.icons.VaadinIcons;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.spring.annotation.SpringView;
import com.vaadin.ui.VerticalLayout;
@SpringView(name = "")
@SideBarItem(sectionId = Sections.VIEWS, caption = "Home", order = 0)
@VaadinFontIcon(VaadinIcons.HOME)
public class HomeView extends VerticalLayout implements View {
public HomeView() {
setSpacing(true);
setMargin(true);
}
@Override
public void enter(ViewChangeListener.ViewChangeEvent event) {
}
}
|
...
package com.github.yuri0x7c1.ofbiz.explorer.common.ui.view;
import org.vaadin.spring.sidebar.annotation.SideBarItem;
import org.vaadin.spring.sidebar.annotation.VaadinFontIcon;
import com.github.yuri0x7c1.ofbiz.explorer.common.ui.sidebar.Sections;
import com.vaadin.icons.VaadinIcons;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.spring.annotation.SpringView;
import com.vaadin.ui.VerticalLayout;
...
@SpringView(name = "")
@SideBarItem(sectionId = Sections.VIEWS, caption = "Home", order = 0)
@VaadinFontIcon(VaadinIcons.HOME)
public class HomeView extends VerticalLayout implements View {
public HomeView() {
...
|
2034259cc37a49f94cef872e29f1b72c84bc4493
|
detekt-sample-extensions/build.gradle.kts
|
detekt-sample-extensions/build.gradle.kts
|
buildscript {
repositories {
mavenLocal()
jcenter()
}
val kotlinVersion: String by project
dependencies {
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlinVersion")
}
}
repositories {
jcenter()
}
apply {
plugin("kotlin")
}
val assertjVersion: String by project
val usedDetektVersion: String by project
val junitEngineVersion: String by project
val junitPlatformVersion: String by project
val spekVersion: String by project
dependencies {
implementation("io.gitlab.arturbosch.detekt:detekt-api:$usedDetektVersion")
testImplementation("io.gitlab.arturbosch.detekt:detekt-test:$usedDetektVersion")
testImplementation("org.junit.jupiter:junit-jupiter-api:$junitEngineVersion")
testImplementation(kotlin("test"))
testImplementation(kotlin("reflect"))
testImplementation("org.assertj:assertj-core:$assertjVersion")
testImplementation("org.jetbrains.spek:spek-api:$spekVersion")
testImplementation("org.jetbrains.spek:spek-subject-extension:$spekVersion")
testImplementation("org.junit.jupiter:junit-jupiter-engine:$junitEngineVersion")
testRuntimeOnly("org.junit.platform:junit-platform-launcher:$junitPlatformVersion")
testRuntimeOnly("org.jetbrains.spek:spek-junit-platform-engine:$spekVersion")
}
|
val assertjVersion: String by project
val usedDetektVersion: String by project
val junitEngineVersion: String by project
val junitPlatformVersion: String by project
val spekVersion: String by project
dependencies {
implementation("io.gitlab.arturbosch.detekt:detekt-api:$usedDetektVersion")
testImplementation("io.gitlab.arturbosch.detekt:detekt-test:$usedDetektVersion")
testImplementation("org.junit.jupiter:junit-jupiter-api:$junitEngineVersion")
testImplementation(kotlin("test"))
testImplementation(kotlin("reflect"))
testImplementation("org.assertj:assertj-core:$assertjVersion")
testImplementation("org.jetbrains.spek:spek-api:$spekVersion")
testImplementation("org.jetbrains.spek:spek-subject-extension:$spekVersion")
testImplementation("org.junit.jupiter:junit-jupiter-engine:$junitEngineVersion")
testRuntimeOnly("org.junit.platform:junit-platform-launcher:$junitPlatformVersion")
testRuntimeOnly("org.jetbrains.spek:spek-junit-platform-engine:$spekVersion")
}
|
Remove redundant Kotlin plugin config
|
Remove redundant Kotlin plugin config
|
Kotlin
|
apache-2.0
|
rock3r/detekt,rock3r/detekt,rock3r/detekt,arturbosch/detekt,arturbosch/detekt,arturbosch/detekt
|
kotlin
|
## Code Before:
buildscript {
repositories {
mavenLocal()
jcenter()
}
val kotlinVersion: String by project
dependencies {
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlinVersion")
}
}
repositories {
jcenter()
}
apply {
plugin("kotlin")
}
val assertjVersion: String by project
val usedDetektVersion: String by project
val junitEngineVersion: String by project
val junitPlatformVersion: String by project
val spekVersion: String by project
dependencies {
implementation("io.gitlab.arturbosch.detekt:detekt-api:$usedDetektVersion")
testImplementation("io.gitlab.arturbosch.detekt:detekt-test:$usedDetektVersion")
testImplementation("org.junit.jupiter:junit-jupiter-api:$junitEngineVersion")
testImplementation(kotlin("test"))
testImplementation(kotlin("reflect"))
testImplementation("org.assertj:assertj-core:$assertjVersion")
testImplementation("org.jetbrains.spek:spek-api:$spekVersion")
testImplementation("org.jetbrains.spek:spek-subject-extension:$spekVersion")
testImplementation("org.junit.jupiter:junit-jupiter-engine:$junitEngineVersion")
testRuntimeOnly("org.junit.platform:junit-platform-launcher:$junitPlatformVersion")
testRuntimeOnly("org.jetbrains.spek:spek-junit-platform-engine:$spekVersion")
}
## Instruction:
Remove redundant Kotlin plugin config
## Code After:
val assertjVersion: String by project
val usedDetektVersion: String by project
val junitEngineVersion: String by project
val junitPlatformVersion: String by project
val spekVersion: String by project
dependencies {
implementation("io.gitlab.arturbosch.detekt:detekt-api:$usedDetektVersion")
testImplementation("io.gitlab.arturbosch.detekt:detekt-test:$usedDetektVersion")
testImplementation("org.junit.jupiter:junit-jupiter-api:$junitEngineVersion")
testImplementation(kotlin("test"))
testImplementation(kotlin("reflect"))
testImplementation("org.assertj:assertj-core:$assertjVersion")
testImplementation("org.jetbrains.spek:spek-api:$spekVersion")
testImplementation("org.jetbrains.spek:spek-subject-extension:$spekVersion")
testImplementation("org.junit.jupiter:junit-jupiter-engine:$junitEngineVersion")
testRuntimeOnly("org.junit.platform:junit-platform-launcher:$junitPlatformVersion")
testRuntimeOnly("org.jetbrains.spek:spek-junit-platform-engine:$spekVersion")
}
|
# ... existing code ...
val assertjVersion: String by project
val usedDetektVersion: String by project
val junitEngineVersion: String by project
# ... rest of the code ...
|
73f75efcfe69210d8e22ff55c19b02b7408b9671
|
pseudorandom.py
|
pseudorandom.py
|
from flask import Flask, render_template
from names import get_full_name
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html', name=get_full_name())
if __name__ == "__main__":
app.run()
|
import os
from flask import Flask, render_template
from names import get_full_name
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html', name=get_full_name())
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Use environment variable PORT for flask port
|
Use environment variable PORT for flask port
|
Python
|
mit
|
treyhunner/pseudorandom.name,treyhunner/pseudorandom.name
|
python
|
## Code Before:
from flask import Flask, render_template
from names import get_full_name
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html', name=get_full_name())
if __name__ == "__main__":
app.run()
## Instruction:
Use environment variable PORT for flask port
## Code After:
import os
from flask import Flask, render_template
from names import get_full_name
app = Flask(__name__)
@app.route("/")
def index():
return render_template('index.html', name=get_full_name())
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
...
import os
from flask import Flask, render_template
from names import get_full_name
...
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
...
|
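An illustrative sketch of the pattern applied in the record above, not part of the dataset entry itself: the listening port is read from the PORT environment variable with a fallback. The helper name and port values are hypothetical.
import os

def resolve_port(default=5000):
    # os.environ.get returns a string (or the default), so cast to int
    return int(os.environ.get('PORT', default))

# e.g. `PORT=8080 python app.py` makes resolve_port() return 8080;
# with no PORT set it falls back to 5000.
print(resolve_port())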
e4b1fcf017494c22744f44bd93381b8063b30e34
|
eadred/tests/test_generate.py
|
eadred/tests/test_generate.py
|
import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
|
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
Fix test to catch options issues
|
Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.
|
Python
|
bsd-3-clause
|
willkg/django-eadred
|
python
|
## Code Before:
import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
## Instruction:
Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.
## Code After:
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
// ... existing code ...
from eadred.management.commands import generatedata
// ... modified code ...
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
// ... rest of the code ...
|
2fcd13435d04622c7ec0915a77efb390ea9c09b1
|
rcstreamlistener.py
|
rcstreamlistener.py
|
import urllib3.contrib.pyopenssl
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
class MainNamespace(BaseNamespace):
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
try:
ipAddressObject = ip_address(strippedTitle)
print 'True'
except ValueError:
print 'False'
def on_connect(self):
self.emit('subscribe', 'en.wikipedia.org')
print 'Connected.'
print 'Connecting...'
socketIO = SocketIO('https://stream.wikimedia.org')
socketIO.define(MainNamespace, '/rc')
socketIO.wait()
|
import urllib3.contrib.pyopenssl
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
import template_adder
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
class MainNamespace(BaseNamespace):
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
if ip_address(strippedTitle):
print 'True'
else:
print 'False'
def on_connect(self):
self.emit('subscribe', 'en.wikipedia.org')
print 'Connected.'
print 'Connecting...'
socketIO = SocketIO('https://stream.wikimedia.org')
socketIO.define(MainNamespace, '/rc')
socketIO.wait()
|
Use if/else instead of try/except
|
Use if/else instead of try/except
|
Python
|
mit
|
piagetbot/enwikibot
|
python
|
## Code Before:
import urllib3.contrib.pyopenssl
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
class MainNamespace(BaseNamespace):
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
try:
ipAddressObject = ip_address(strippedTitle)
print 'True'
except ValueError:
print 'False'
def on_connect(self):
self.emit('subscribe', 'en.wikipedia.org')
print 'Connected.'
print 'Connecting...'
socketIO = SocketIO('https://stream.wikimedia.org')
socketIO.define(MainNamespace, '/rc')
socketIO.wait()
## Instruction:
Use if/else instead of try/except
## Code After:
import urllib3.contrib.pyopenssl
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
import template_adder
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
class MainNamespace(BaseNamespace):
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
if ip_address(strippedTitle):
print 'True'
else:
print 'False'
def on_connect(self):
self.emit('subscribe', 'en.wikipedia.org')
print 'Connected.'
print 'Connecting...'
socketIO = SocketIO('https://stream.wikimedia.org')
socketIO.define(MainNamespace, '/rc')
socketIO.wait()
|
...
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
import template_adder
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
...
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
if ip_address(strippedTitle):
print 'True'
else:
print 'False'
def on_connect(self):
...
|
07eb747a8a1b4e7510b3f2e7f5240663248962d6
|
app/src/main/java/lib/morkim/mfw/ui/Viewable.java
|
app/src/main/java/lib/morkim/mfw/ui/Viewable.java
|
package lib.morkim.mfw.ui;
import android.content.Context;
import lib.morkim.mfw.app.AppContext;
public interface Viewable {
public void finish();
AppContext getMorkimContext();
void keepScreenOn(boolean keepOn);
Context getContext();
}
|
package lib.morkim.mfw.ui;
import android.content.Context;
import java.util.Observer;
import lib.morkim.mfw.app.AppContext;
public interface Viewable extends Observer {
public void finish();
AppContext getMorkimContext();
void keepScreenOn(boolean keepOn);
Context getContext();
}
|
Remove view model in presenter
|
Remove view model in presenter
|
Java
|
mit
|
alkammar/morkim
|
java
|
## Code Before:
package lib.morkim.mfw.ui;
import android.content.Context;
import lib.morkim.mfw.app.AppContext;
public interface Viewable {
public void finish();
AppContext getMorkimContext();
void keepScreenOn(boolean keepOn);
Context getContext();
}
## Instruction:
Remove view model in presenter
## Code After:
package lib.morkim.mfw.ui;
import android.content.Context;
import java.util.Observer;
import lib.morkim.mfw.app.AppContext;
public interface Viewable extends Observer {
public void finish();
AppContext getMorkimContext();
void keepScreenOn(boolean keepOn);
Context getContext();
}
|
...
import android.content.Context;
import java.util.Observer;
import lib.morkim.mfw.app.AppContext;
public interface Viewable extends Observer {
public void finish();
AppContext getMorkimContext();
...
|
f845fcfc145edd2ef55df3275971f5c940a61bb4
|
tests/list_match.py
|
tests/list_match.py
|
from bedrock import *
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = Cons(1, Cons(2, Cons(3, Nil)))
b = match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
("_", lambda: 4))
assert b == 2, "List pattern match"
return 0
|
from bedrock import *
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = hint(Cons(1, Cons(2, Cons(3, Nil))), a='int')
#b = hint(match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
# ("_", lambda: 4)), a='int')
#assert b == 2, "List pattern match"
return 0
|
Disable match() test for now
|
Disable match() test for now
|
Python
|
mit
|
pshc/archipelago,pshc/archipelago,pshc/archipelago
|
python
|
## Code Before:
from bedrock import *
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = Cons(1, Cons(2, Cons(3, Nil)))
b = match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
("_", lambda: 4))
assert b == 2, "List pattern match"
return 0
## Instruction:
Disable match() test for now
## Code After:
from bedrock import *
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = hint(Cons(1, Cons(2, Cons(3, Nil))), a='int')
#b = hint(match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
# ("_", lambda: 4)), a='int')
#assert b == 2, "List pattern match"
return 0
|
// ... existing code ...
@annot('void -> int')
def main():
a = hint(Cons(0, Cons(1, Nil())), a='int')
a = hint(Cons(1, Cons(2, Cons(3, Nil))), a='int')
#b = hint(match(a, ("Cons(_, Cons(two, Cons(_, Nil())))", identity),
# ("_", lambda: 4)), a='int')
#assert b == 2, "List pattern match"
return 0
// ... rest of the code ...
|
fbb3c38417e85f327e6a347338a005162779314b
|
run_tests.py
|
run_tests.py
|
from __future__ import print_function
import os
import imp
import fnmatch
# Test directory
DIR_TEST = 'tests'
def find_tests(pathname):
"""Recursively finds the test modules.
:param str pathname: Path name where the tests are stored.
:returns: List of paths to each test modules.
:rtype: :class:`list`
"""
founds = []
for base, _, files in os.walk(pathname):
founds.extend((
(matched_file, base)
for matched_file in fnmatch.filter(files, '*.py')
if matched_file != "__init__.py"))
return founds
def run_tests(pathnames):
"""Loads each test module and run their `run` function.
:param list pathnames: List of (module_name, path_to_the_module).
"""
for module, path in pathnames:
current_mod = imp.load_source(
os.path.splitext(module)[0],
os.path.join(path, module))
print("Testing:", current_mod.__testname__)
current_mod.run()
if __name__ == '__main__':
run_tests(find_tests(os.path.join(os.getcwd(), DIR_TEST)))
|
from __future__ import print_function
import os
import sys
import imp
import fnmatch
# Test directory
DIR_TEST = 'tests'
def find_tests(pathname):
"""Recursively finds the test modules.
:param str pathname: Path name where the tests are stored.
:returns: List of paths to each test modules.
:rtype: :class:`list`
"""
founds = []
for base, _, files in os.walk(pathname):
founds.extend((
(matched_file, base)
for matched_file in fnmatch.filter(files, '*.py')
if matched_file != "__init__.py"))
return founds
def run_tests(pathnames, test_name=None):
"""Loads each test module and run their `run` function.
:param list pathnames: List of (module_name, path_to_the_module).
"""
for module, path in pathnames:
current_mod = imp.load_source(
os.path.splitext(module)[0],
os.path.join(path, module))
if test_name and test_name != current_mod.__testname__:
continue
print("Testing:", current_mod.__testname__)
current_mod.run()
if __name__ == '__main__':
test_name = None
if len(sys.argv) == 2:
test_name = sys.argv[1]
run_tests(find_tests(os.path.join(os.getcwd(), DIR_TEST)), test_name)
|
Allow choosing a specific test case.
|
[tests] Allow choosing a specific test case.
|
Python
|
bsd-3-clause
|
owtf/ptp,DoomTaper/ptp
|
python
|
## Code Before:
from __future__ import print_function
import os
import imp
import fnmatch
# Test directory
DIR_TEST = 'tests'
def find_tests(pathname):
"""Recursively finds the test modules.
:param str pathname: Path name where the tests are stored.
:returns: List of paths to each test modules.
:rtype: :class:`list`
"""
founds = []
for base, _, files in os.walk(pathname):
founds.extend((
(matched_file, base)
for matched_file in fnmatch.filter(files, '*.py')
if matched_file != "__init__.py"))
return founds
def run_tests(pathnames):
"""Loads each test module and run their `run` function.
:param list pathnames: List of (module_name, path_to_the_module).
"""
for module, path in pathnames:
current_mod = imp.load_source(
os.path.splitext(module)[0],
os.path.join(path, module))
print("Testing:", current_mod.__testname__)
current_mod.run()
if __name__ == '__main__':
run_tests(find_tests(os.path.join(os.getcwd(), DIR_TEST)))
## Instruction:
[tests] Allow choosing a specific test case.
## Code After:
from __future__ import print_function
import os
import sys
import imp
import fnmatch
# Test directory
DIR_TEST = 'tests'
def find_tests(pathname):
"""Recursively finds the test modules.
:param str pathname: Path name where the tests are stored.
:returns: List of paths to each test modules.
:rtype: :class:`list`
"""
founds = []
for base, _, files in os.walk(pathname):
founds.extend((
(matched_file, base)
for matched_file in fnmatch.filter(files, '*.py')
if matched_file != "__init__.py"))
return founds
def run_tests(pathnames, test_name=None):
"""Loads each test module and run their `run` function.
:param list pathnames: List of (module_name, path_to_the_module).
"""
for module, path in pathnames:
current_mod = imp.load_source(
os.path.splitext(module)[0],
os.path.join(path, module))
if test_name and test_name != current_mod.__testname__:
continue
print("Testing:", current_mod.__testname__)
current_mod.run()
if __name__ == '__main__':
test_name = None
if len(sys.argv) == 2:
test_name = sys.argv[1]
run_tests(find_tests(os.path.join(os.getcwd(), DIR_TEST)), test_name)
|
# ... existing code ...
from __future__ import print_function
import os
import sys
import imp
import fnmatch
# ... modified code ...
return founds
def run_tests(pathnames, test_name=None):
"""Loads each test module and run their `run` function.
:param list pathnames: List of (module_name, path_to_the_module).
...
current_mod = imp.load_source(
os.path.splitext(module)[0],
os.path.join(path, module))
if test_name and test_name != current_mod.__testname__:
continue
print("Testing:", current_mod.__testname__)
current_mod.run()
if __name__ == '__main__':
test_name = None
if len(sys.argv) == 2:
test_name = sys.argv[1]
run_tests(find_tests(os.path.join(os.getcwd(), DIR_TEST)), test_name)
# ... rest of the code ...
|
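A short usage sketch for the optional test-name argument introduced above; the test name shown is invented and the snippet only mirrors the argument handling, not the real runner.
import sys

# python run_tests.py              -> run every discovered test module
# python run_tests.py "Some test"  -> run only the module whose
#                                     __testname__ equals "Some test"
requested = sys.argv[1] if len(sys.argv) == 2 else None
print(requested or "running all tests")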
a39acef2a0cad730bfa8317dc98c733212b2afb0
|
include/libk/kmem.h
|
include/libk/kmem.h
|
//#define KHEAP_PHYS_END ((void*)0xc1000000)
#define KHEAP_END_SENTINEL (NULL)
#define KHEAP_BLOCK_SLOP 32
struct kheap_metadata {
size_t size;
struct kheap_metadata *next;
bool is_free;
};
struct kheap_metadata *root;
struct kheap_metadata *kheap_init();
int kheap_extend();
void kheap_install(struct kheap_metadata *root, size_t initial_heap_size);
void *kmalloc(size_t bytes);
void kfree(void *mem);
void kheap_defragment();
#endif
|
// KHEAP_PHYS_ROOT is defined in memlayout.h because it is architecture
// specific.
#define KHEAP_END_SENTINEL (NULL)
#define KHEAP_BLOCK_SLOP 32
struct kheap_metadata {
size_t size;
struct kheap_metadata *next;
bool is_free;
};
struct kheap_metadata *root;
struct kheap_metadata *kheap_init();
int kheap_extend();
void kheap_install(struct kheap_metadata *root, size_t initial_heap_size);
void *kmalloc(size_t bytes);
void kfree(void *mem);
void kheap_defragment();
#endif
|
Move KHEAP macros into memlayout.h
|
Move KHEAP macros into memlayout.h
We should try to make this file architecture agnostic
|
C
|
mit
|
iankronquist/kernel-of-truth,iankronquist/kernel-of-truth,Herbstein/kernel-of-truth,awensaunders/kernel-of-truth,iankronquist/kernel-of-truth,Herbstein/kernel-of-truth,awensaunders/kernel-of-truth,Herbstein/kernel-of-truth,iankronquist/kernel-of-truth,awensaunders/kernel-of-truth,iankronquist/kernel-of-truth
|
c
|
## Code Before:
//#define KHEAP_PHYS_END ((void*)0xc1000000)
#define KHEAP_END_SENTINEL (NULL)
#define KHEAP_BLOCK_SLOP 32
struct kheap_metadata {
size_t size;
struct kheap_metadata *next;
bool is_free;
};
struct kheap_metadata *root;
struct kheap_metadata *kheap_init();
int kheap_extend();
void kheap_install(struct kheap_metadata *root, size_t initial_heap_size);
void *kmalloc(size_t bytes);
void kfree(void *mem);
void kheap_defragment();
#endif
## Instruction:
Move KHEAP macros into memlayout.h
We should try to make this file architecture agnostic
## Code After:
// KHEAP_PHYS_ROOT is defined in memlayout.h because it is architecture
// specific.
#define KHEAP_END_SENTINEL (NULL)
#define KHEAP_BLOCK_SLOP 32
struct kheap_metadata {
size_t size;
struct kheap_metadata *next;
bool is_free;
};
struct kheap_metadata *root;
struct kheap_metadata *kheap_init();
int kheap_extend();
void kheap_install(struct kheap_metadata *root, size_t initial_heap_size);
void *kmalloc(size_t bytes);
void kfree(void *mem);
void kheap_defragment();
#endif
|
...
// KHEAP_PHYS_ROOT is defined in memlayout.h because it is architecture
// specific.
#define KHEAP_END_SENTINEL (NULL)
#define KHEAP_BLOCK_SLOP 32
...
|
c9ca274a1a5e9596de553d2ae16950a845359321
|
examples/plotting/file/geojson_points.py
|
examples/plotting/file/geojson_points.py
|
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, source=GeoJSONDataSource(geojson=geojson))
show(p)
|
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource, HoverTool
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, size=20, source=GeoJSONDataSource(geojson=geojson))
p.add_tools(HoverTool(tooltips=[("Organisation Name", "@OrganisationName")]))
show(p)
|
Add HoverTool to show off property availability
|
Add HoverTool to show off property availability
|
Python
|
bsd-3-clause
|
draperjames/bokeh,bokeh/bokeh,timsnyder/bokeh,dennisobrien/bokeh,maxalbert/bokeh,phobson/bokeh,jakirkham/bokeh,stonebig/bokeh,jakirkham/bokeh,bokeh/bokeh,DuCorey/bokeh,justacec/bokeh,dennisobrien/bokeh,clairetang6/bokeh,azjps/bokeh,timsnyder/bokeh,aiguofer/bokeh,timsnyder/bokeh,ericmjl/bokeh,DuCorey/bokeh,rs2/bokeh,bokeh/bokeh,jakirkham/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,mindriot101/bokeh,philippjfr/bokeh,aiguofer/bokeh,timsnyder/bokeh,draperjames/bokeh,clairetang6/bokeh,percyfal/bokeh,stonebig/bokeh,philippjfr/bokeh,DuCorey/bokeh,schoolie/bokeh,ptitjano/bokeh,mindriot101/bokeh,schoolie/bokeh,jakirkham/bokeh,aiguofer/bokeh,stonebig/bokeh,percyfal/bokeh,phobson/bokeh,phobson/bokeh,dennisobrien/bokeh,azjps/bokeh,quasiben/bokeh,clairetang6/bokeh,draperjames/bokeh,ericmjl/bokeh,aavanian/bokeh,msarahan/bokeh,aiguofer/bokeh,ericmjl/bokeh,philippjfr/bokeh,htygithub/bokeh,dennisobrien/bokeh,ericmjl/bokeh,rs2/bokeh,percyfal/bokeh,quasiben/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,timsnyder/bokeh,dennisobrien/bokeh,maxalbert/bokeh,aavanian/bokeh,msarahan/bokeh,percyfal/bokeh,htygithub/bokeh,rs2/bokeh,DuCorey/bokeh,msarahan/bokeh,ptitjano/bokeh,philippjfr/bokeh,clairetang6/bokeh,draperjames/bokeh,maxalbert/bokeh,azjps/bokeh,aiguofer/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,mindriot101/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,rs2/bokeh,bokeh/bokeh,schoolie/bokeh,phobson/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,aavanian/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,philippjfr/bokeh,justacec/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,quasiben/bokeh,aavanian/bokeh,DuCorey/bokeh,stonebig/bokeh,ericmjl/bokeh,schoolie/bokeh,msarahan/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,KasperPRasmussen/bokeh,KasperPRasmussen/bokeh,htygithub/bokeh,mindriot101/bokeh,azjps/bokeh,bokeh/bokeh,maxalbert/bokeh,percyfal/bokeh,schoolie/bokeh,aavanian/bokeh,ptitjano/bokeh
|
python
|
## Code Before:
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, source=GeoJSONDataSource(geojson=geojson))
show(p)
## Instruction:
Add HoverTool to show off property availability
## Code After:
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource, HoverTool
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, size=20, source=GeoJSONDataSource(geojson=geojson))
p.add_tools(HoverTool(tooltips=[("Organisation Name", "@OrganisationName")]))
show(p)
|
# ... existing code ...
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource, HoverTool
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
# ... modified code ...
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, size=20, source=GeoJSONDataSource(geojson=geojson))
p.add_tools(HoverTool(tooltips=[("Organisation Name", "@OrganisationName")]))
show(p)
# ... rest of the code ...
|
da28458dffc3529f16cb222fce1676ddb0d87e05
|
oembed/resources.py
|
oembed/resources.py
|
from django.utils.simplejson import simplejson
from oembed.exceptions import OEmbedException
class OEmbedResource(object):
"""
OEmbed resource, as well as a factory for creating resource instances
from response json
"""
_data = {}
content_object = None
def __getattr__(self, name):
return self._data.get(name)
def get_data(self):
return self._data
def load_data(self, data):
self._data = data
@property
def json(self):
return simplejson.dumps(self._data)
@classmethod
def create(cls, data):
if not 'type' in data or not 'version' in data:
raise OEmbedException('Missing required fields on OEmbed response.')
data['width'] = data.get('width') and int(data['width']) or None
data['height'] = data.get('height') and int(data['height']) or None
filtered_data = dict([(k, v) for k, v in data.items() if v])
resource = cls()
resource.load_data(filtered_data)
return resource
@classmethod
def create_json(cls, raw):
data = simplejson.loads(raw)
return cls.create(data)
|
from django.utils import simplejson
from oembed.exceptions import OEmbedException
class OEmbedResource(object):
"""
OEmbed resource, as well as a factory for creating resource instances
from response json
"""
_data = {}
content_object = None
def __getattr__(self, name):
return self._data.get(name)
def get_data(self):
return self._data
def load_data(self, data):
self._data = data
@property
def json(self):
return simplejson.dumps(self._data)
@classmethod
def create(cls, data):
if not 'type' in data or not 'version' in data:
raise OEmbedException('Missing required fields on OEmbed response.')
data['width'] = data.get('width') and int(data['width']) or None
data['height'] = data.get('height') and int(data['height']) or None
filtered_data = dict([(k, v) for k, v in data.items() if v])
resource = cls()
resource.load_data(filtered_data)
return resource
@classmethod
def create_json(cls, raw):
data = simplejson.loads(raw)
return cls.create(data)
|
Use the simplejson bundled with django
|
Use the simplejson bundled with django
|
Python
|
mit
|
0101/djangoembed,worldcompany/djangoembed,akvo/djangoembed,akvo/djangoembed,worldcompany/djangoembed,d4nielcosta/djangoembed,0101/djangoembed,d4nielcosta/djangoembed
|
python
|
## Code Before:
from django.utils.simplejson import simplejson
from oembed.exceptions import OEmbedException
class OEmbedResource(object):
"""
OEmbed resource, as well as a factory for creating resource instances
from response json
"""
_data = {}
content_object = None
def __getattr__(self, name):
return self._data.get(name)
def get_data(self):
return self._data
def load_data(self, data):
self._data = data
@property
def json(self):
return simplejson.dumps(self._data)
@classmethod
def create(cls, data):
if not 'type' in data or not 'version' in data:
raise OEmbedException('Missing required fields on OEmbed response.')
data['width'] = data.get('width') and int(data['width']) or None
data['height'] = data.get('height') and int(data['height']) or None
filtered_data = dict([(k, v) for k, v in data.items() if v])
resource = cls()
resource.load_data(filtered_data)
return resource
@classmethod
def create_json(cls, raw):
data = simplejson.loads(raw)
return cls.create(data)
## Instruction:
Use the simplejson bundled with django
## Code After:
from django.utils import simplejson
from oembed.exceptions import OEmbedException
class OEmbedResource(object):
"""
OEmbed resource, as well as a factory for creating resource instances
from response json
"""
_data = {}
content_object = None
def __getattr__(self, name):
return self._data.get(name)
def get_data(self):
return self._data
def load_data(self, data):
self._data = data
@property
def json(self):
return simplejson.dumps(self._data)
@classmethod
def create(cls, data):
if not 'type' in data or not 'version' in data:
raise OEmbedException('Missing required fields on OEmbed response.')
data['width'] = data.get('width') and int(data['width']) or None
data['height'] = data.get('height') and int(data['height']) or None
filtered_data = dict([(k, v) for k, v in data.items() if v])
resource = cls()
resource.load_data(filtered_data)
return resource
@classmethod
def create_json(cls, raw):
data = simplejson.loads(raw)
return cls.create(data)
|
...
from django.utils import simplejson
from oembed.exceptions import OEmbedException
...
|
39f831ae6569c451c5a119d2ef620199be63992d
|
Magic/src/main/java/com/elmakers/mine/bukkit/magic/MobTrigger.java
|
Magic/src/main/java/com/elmakers/mine/bukkit/magic/MobTrigger.java
|
package com.elmakers.mine.bukkit.magic;
import javax.annotation.Nonnull;
import org.bukkit.configuration.ConfigurationSection;
import com.elmakers.mine.bukkit.api.magic.MageController;
public class MobTrigger extends CustomTrigger {
public MobTrigger(@Nonnull MageController controller, @Nonnull String key, @Nonnull ConfigurationSection configuration) {
super(controller, key, configuration);
}
}
|
package com.elmakers.mine.bukkit.magic;
import javax.annotation.Nonnull;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Creature;
import org.bukkit.entity.Entity;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.magic.MageController;
public class MobTrigger extends CustomTrigger {
boolean requiresTarget = false;
public MobTrigger(@Nonnull MageController controller, @Nonnull String key, @Nonnull ConfigurationSection configuration) {
super(controller, key, configuration);
requiresTarget = configuration.getBoolean("requires_target");
}
public boolean isValid(Mage mage) {
if (!isValid(mage)) return false;
if (requiresTarget) {
Entity entity = mage.getEntity();
if (entity instanceof Creature) {
return ((Creature)entity).getTarget() != null;
}
}
return true;
}
}
|
Add require_targets to mob triggers
|
Add require_targets to mob triggers
|
Java
|
mit
|
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
|
java
|
## Code Before:
package com.elmakers.mine.bukkit.magic;
import javax.annotation.Nonnull;
import org.bukkit.configuration.ConfigurationSection;
import com.elmakers.mine.bukkit.api.magic.MageController;
public class MobTrigger extends CustomTrigger {
public MobTrigger(@Nonnull MageController controller, @Nonnull String key, @Nonnull ConfigurationSection configuration) {
super(controller, key, configuration);
}
}
## Instruction:
Add require_targets to mob triggers
## Code After:
package com.elmakers.mine.bukkit.magic;
import javax.annotation.Nonnull;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Creature;
import org.bukkit.entity.Entity;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.magic.MageController;
public class MobTrigger extends CustomTrigger {
boolean requiresTarget = false;
public MobTrigger(@Nonnull MageController controller, @Nonnull String key, @Nonnull ConfigurationSection configuration) {
super(controller, key, configuration);
requiresTarget = configuration.getBoolean("requires_target");
}
public boolean isValid(Mage mage) {
if (!isValid(mage)) return false;
if (requiresTarget) {
Entity entity = mage.getEntity();
if (entity instanceof Creature) {
return ((Creature)entity).getTarget() != null;
}
}
return true;
}
}
|
# ... existing code ...
import javax.annotation.Nonnull;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Creature;
import org.bukkit.entity.Entity;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.magic.MageController;
public class MobTrigger extends CustomTrigger {
boolean requiresTarget = false;
public MobTrigger(@Nonnull MageController controller, @Nonnull String key, @Nonnull ConfigurationSection configuration) {
super(controller, key, configuration);
requiresTarget = configuration.getBoolean("requires_target");
}
public boolean isValid(Mage mage) {
if (!isValid(mage)) return false;
if (requiresTarget) {
Entity entity = mage.getEntity();
if (entity instanceof Creature) {
return ((Creature)entity).getTarget() != null;
}
}
return true;
}
}
# ... rest of the code ...
|
94cae9c13ac90a7de50cfaf998b9b423e7a2eaf1
|
csunplugged/resources/utils/resource_valid_configurations.py
|
csunplugged/resources/utils/resource_valid_configurations.py
|
"""Create list of all possible valid resource combinations."""
import itertools
from utils.bool_to_yes_no import bool_to_yes_no
def resource_valid_configurations(valid_options, header_text=True):
"""Return list of all possible valid resource combinations.
Args:
valid_options: A dictionary containing all valid resource generation
options (dict).
header_text: If true, add in valid options for header text (bool).
Returns:
List of dictionaries of valid combinations (list).
"""
if header_text:
valid_options["header_text"] = ["", "Example header"]
# Change all booleans to text to mimic forms
for (key, value) in valid_options.items():
if isinstance(value, bool):
valid_options[key] = bool_to_yes_no(value)
valid_option_keys = sorted(valid_options)
return [dict(zip(valid_option_keys, product)) for product in itertools.product(
*(valid_options[valid_option_key] for valid_option_key in valid_option_keys)
)]
|
"""Create list of all possible valid resource combinations."""
import itertools
from utils.bool_to_yes_no import bool_to_yes_no
def resource_valid_configurations(valid_options, header_text=True):
"""Return list of all possible valid resource combinations.
Args:
valid_options: A dictionary containing all valid resource generation
options (dict).
header_text: If true, add in valid options for header text (bool).
Returns:
List of dictionaries of valid combinations (list).
"""
if header_text:
valid_options["header_text"] = ["", "Example header"]
# Change all booleans to text to mimic forms
for (key, values) in valid_options.items():
for i in range(0, len(values)):
if isinstance(values[i], bool):
values[i] = bool_to_yes_no(values[i])
valid_option_keys = sorted(valid_options)
return [dict(zip(valid_option_keys, product)) for product in itertools.product(
*(valid_options[valid_option_key] for valid_option_key in valid_option_keys)
)]
|
Fix bug where boolean combination values were not changed to strings
|
Fix bug where boolean combination values were not changed to strings
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
python
|
## Code Before:
"""Create list of all possible valid resource combinations."""
import itertools
from utils.bool_to_yes_no import bool_to_yes_no
def resource_valid_configurations(valid_options, header_text=True):
"""Return list of all possible valid resource combinations.
Args:
valid_options: A dictionary containing all valid resource generation
options (dict).
header_text: If true, add in valid options for header text (bool).
Returns:
List of dictionaries of valid combinations (list).
"""
if header_text:
valid_options["header_text"] = ["", "Example header"]
# Change all booleans to text to mimic forms
for (key, value) in valid_options.items():
if isinstance(value, bool):
valid_options[key] = bool_to_yes_no(value)
valid_option_keys = sorted(valid_options)
return [dict(zip(valid_option_keys, product)) for product in itertools.product(
*(valid_options[valid_option_key] for valid_option_key in valid_option_keys)
)]
## Instruction:
Fix bug where boolean combination values were not changed to strings
## Code After:
"""Create list of all possible valid resource combinations."""
import itertools
from utils.bool_to_yes_no import bool_to_yes_no
def resource_valid_configurations(valid_options, header_text=True):
"""Return list of all possible valid resource combinations.
Args:
valid_options: A dictionary containing all valid resource generation
options (dict).
header_text: If true, add in valid options for header text (bool).
Returns:
List of dictionaries of valid combinations (list).
"""
if header_text:
valid_options["header_text"] = ["", "Example header"]
# Change all booleans to text to mimic forms
for (key, values) in valid_options.items():
for i in range(0, len(values)):
if isinstance(values[i], bool):
values[i] = bool_to_yes_no(values[i])
valid_option_keys = sorted(valid_options)
return [dict(zip(valid_option_keys, product)) for product in itertools.product(
*(valid_options[valid_option_key] for valid_option_key in valid_option_keys)
)]
|
# ... existing code ...
if header_text:
valid_options["header_text"] = ["", "Example header"]
# Change all booleans to text to mimic forms
for (key, values) in valid_options.items():
for i in range(0, len(values)):
if isinstance(values[i], bool):
values[i] = bool_to_yes_no(values[i])
valid_option_keys = sorted(valid_options)
return [dict(zip(valid_option_keys, product)) for product in itertools.product(
*(valid_options[valid_option_key] for valid_option_key in valid_option_keys)
# ... rest of the code ...
|
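A small self-contained sketch of the combination expansion performed by resource_valid_configurations; the option names and values are made up for illustration.
import itertools

valid_options = {"paper_size": ["a4", "letter"], "header_text": ["", "Example header"]}
keys = sorted(valid_options)
# Cartesian product over the value lists, zipped back onto the sorted keys
combinations = [dict(zip(keys, product))
                for product in itertools.product(*(valid_options[k] for k in keys))]
print(len(combinations))  # 4 dicts, e.g. {'header_text': '', 'paper_size': 'a4'}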
9e73de0014b3f88b9e94ead11a878c6bc3819782
|
selenium_testcase/tests/test_navigation.py
|
selenium_testcase/tests/test_navigation.py
|
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
|
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
Test missing content and failed navigation tests.
|
Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a URL does not exist or the test is looking for content that
doesn't exist on the page. Since testing for missing information
requires timeouts to be sure, some of these tests take several
seconds to execute.
|
Python
|
bsd-3-clause
|
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
|
python
|
## Code Before:
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
## Instruction:
Test missing content and failed navigation tests.
This commit adds unit tests outside of the happy path where
a URL does not exist or the test is looking for content that
doesn't exist on the page. Since testing for missing information
requires timeouts to be sure, some of these tests take several
seconds to execute.
## Code After:
from __future__ import absolute_import
from ..testcases import SeleniumLiveTestCase
class NavigationTestCase(SeleniumLiveTestCase):
test_templates = [
(r'^nav_1/$', 'nav_1.html'),
(r'^nav_1/nav_2/$', 'nav_2.html')
]
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
|
// ... existing code ...
def test_get_page(self):
""" Test that you can traverse the page tree. """
self.get_page("/nav_1/")
self.has_title("Navigation 1")
self.title_contains("1")
self.should_see("This is nav 1.")
self.get_page("/nav_1/nav_2/")
self.should_see("This is nav 2.")
def test_get_bad_page(self):
""" Test that /bogus/ is not found. """
self.get_page("/bogus/")
self.should_see("Not Found")
self.should_see(
"The requested URL /bogus/ was not found on this server.")
def test_missing_content_with_retry(self):
""" Test retry for missing content, LONG RETRIES! """
self.get_page("/nav_1/")
self.should_not_see("This is nav 2.")
self.url_should_not_contain("nav_2")
self.has_not_title("Navigation 2")
self.title_does_not_contain("2")
self.assertRaises(
AssertionError, self.click_button, "not_there_dude")
self.not_at_page("/nav_2/")
// ... rest of the code ...
|
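A standalone sketch of why the negative assertions above are slow: absence can only be confirmed after the whole retry window has elapsed. The function below is a made-up stand-in, not the library's implementation.
import time

def should_not_see(fetch_text, needle, timeout=2.0, interval=0.5):
    # Keep polling until the deadline; give up early only if the text shows up.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if needle in fetch_text():
            raise AssertionError("unexpectedly saw %r" % needle)
        time.sleep(interval)
    return True

print(should_not_see(lambda: "This is nav 1.", "This is nav 2."))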
697120d4e693bf7fbc192164b5df3dfb30f71a3f
|
tests/__init__.py
|
tests/__init__.py
|
import logging
import unittest
import os
from sqlalchemy import create_engine
from rtrss import config, database
logging.disable(logging.ERROR)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI,
echo=False,
client_encoding='utf8')
# Reconfigure session factory to use our test schema
database.Session.configure(bind=engine)
class AttrDict(dict):
"""Class to make mock objects"""
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
class RTRSSTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
if os.path.isdir(config.DATA_DIR):
os.rmdir(config.DATA_DIR)
os.makedirs(config.DATA_DIR)
@classmethod
def tearDownClass(cls):
os.rmdir(config.DATA_DIR)
class RTRSSDataBaseTestCase(RTRSSTestCase):
def setUp(self):
database.clear(engine)
database.init(engine)
self.db = database.Session()
def tearDown(self):
database.clear(engine)
self.db.close()
|
import logging
import unittest
import os
import shutil
from sqlalchemy import create_engine
from rtrss import config, database
logging.disable(logging.ERROR)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI,
echo=False,
client_encoding='utf8')
# Reconfigure session factory to use our test schema
database.Session.configure(bind=engine)
class AttrDict(dict):
"""Class to make mock objects"""
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
class RTRSSTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
if os.path.isdir(config.DATA_DIR):
os.rmdir(config.DATA_DIR)
os.makedirs(config.DATA_DIR)
@classmethod
def tearDownClass(cls):
shutil.rmtree(config.DATA_DIR)
class RTRSSDataBaseTestCase(RTRSSTestCase):
def setUp(self):
database.clear(engine)
database.init(engine)
self.db = database.Session()
def tearDown(self):
database.clear(engine)
self.db.close()
|
Remove test data folder with contents
|
Remove test data folder with contents
|
Python
|
apache-2.0
|
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
|
python
|
## Code Before:
import logging
import unittest
import os
from sqlalchemy import create_engine
from rtrss import config, database
logging.disable(logging.ERROR)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI,
echo=False,
client_encoding='utf8')
# Reconfigure session factory to use our test schema
database.Session.configure(bind=engine)
class AttrDict(dict):
"""Class to make mock objects"""
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
class RTRSSTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
if os.path.isdir(config.DATA_DIR):
os.rmdir(config.DATA_DIR)
os.makedirs(config.DATA_DIR)
@classmethod
def tearDownClass(cls):
os.rmdir(config.DATA_DIR)
class RTRSSDataBaseTestCase(RTRSSTestCase):
def setUp(self):
database.clear(engine)
database.init(engine)
self.db = database.Session()
def tearDown(self):
database.clear(engine)
self.db.close()
## Instruction:
Remove test data folder with contents
## Code After:
import logging
import unittest
import os
import shutil
from sqlalchemy import create_engine
from rtrss import config, database
logging.disable(logging.ERROR)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI,
echo=False,
client_encoding='utf8')
# Reconfigure session factory to use our test schema
database.Session.configure(bind=engine)
class AttrDict(dict):
"""Class to make mock objects"""
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
class RTRSSTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
if os.path.isdir(config.DATA_DIR):
os.rmdir(config.DATA_DIR)
os.makedirs(config.DATA_DIR)
@classmethod
def tearDownClass(cls):
shutil.rmtree(config.DATA_DIR)
class RTRSSDataBaseTestCase(RTRSSTestCase):
def setUp(self):
database.clear(engine)
database.init(engine)
self.db = database.Session()
def tearDown(self):
database.clear(engine)
self.db.close()
|
...
import logging
import unittest
import os
import shutil
from sqlalchemy import create_engine
...
@classmethod
def tearDownClass(cls):
shutil.rmtree(config.DATA_DIR)
class RTRSSDataBaseTestCase(RTRSSTestCase):
...
|
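A quick sketch of the behaviour the change above relies on: os.rmdir only removes empty directories, while shutil.rmtree removes a directory tree together with its contents. The paths are temporary and hypothetical.
import os
import shutil
import tempfile

data_dir = tempfile.mkdtemp()
open(os.path.join(data_dir, "leftover.txt"), "w").close()

# os.rmdir(data_dir) would raise OSError here because the directory is not empty;
# shutil.rmtree removes the directory and everything inside it.
shutil.rmtree(data_dir)
print(os.path.isdir(data_dir))  # False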
67c58e3941491d276318daf568354f1e17ef3892
|
omics/gsa/__init__.py
|
omics/gsa/__init__.py
|
from GeneSetCollection import GeneSetCollection
|
from GeneSetCollection import GeneSetCollect
def enrichment(gene_list, gene_set, background, alternative="two-sided", verbose=True):
"""Gene set enrichment analysis by Fisher Exact Test.
gene_list : query gene list
gene_set : predefined gene set
background : background gene set
alternative: {'two-sided', 'less', 'greater'}, optional
verbose : print results or not
Return: odds ratio (prior), p-value.
See http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.fisher_exact.html
To-do: to support a whole genome for default background?
"""
from scipy.stats import fisher_exact
from math import log10
L = set(gene_list) & set(background)
S = set(gene_set) & set(background)
a = len(L & S)
b = len(L) - a
c = len(S) - a
d = len(background) - (a + b + c)
oddsratio, p_value = fisher_exact([[a, b], [c, d]], alternative)
if verbose:
print "2x2 contingency table:"
print "\t%d\t%d" % (a, b)
print "\t%d\t%d" % (c, d)
print "odds ratio:\t%f" % oddsratio
print "%s P-val:\t%g" % (alternative, p_value)
print "-log(P-val):\t%f" % -log10(p_value)
return oddsratio, p_value
|
Add enrichment function in gsa module
|
Add enrichment function in gsa module
|
Python
|
mit
|
choyichen/omics
|
python
|
## Code Before:
from GeneSetCollection import GeneSetCollection
## Instruction:
Add enrichment function in gsa module
## Code After:
from GeneSetCollection import GeneSetCollect
def enrichment(gene_list, gene_set, background, alternative="two-sided", verbose=True):
"""Gene set enrichment analysis by Fisher Exact Test.
gene_list : query gene list
gene_set : predefined gene set
background : background gene set
alternative: {'two-sided', 'less', 'greater'}, optional
verbose : print results or not
Return: odds ratio (prior), p-value.
See http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.fisher_exact.html
To-do: to support a whole genome for default background?
"""
from scipy.stats import fisher_exact
from math import log10
L = set(gene_list) & set(background)
S = set(gene_set) & set(background)
a = len(L & S)
b = len(L) - a
c = len(S) - a
d = len(background) - (a + b + c)
oddsratio, p_value = fisher_exact([[a, b], [c, d]], alternative)
if verbose:
print "2x2 contingency table:"
print "\t%d\t%d" % (a, b)
print "\t%d\t%d" % (c, d)
print "odds ratio:\t%f" % oddsratio
print "%s P-val:\t%g" % (alternative, p_value)
print "-log(P-val):\t%f" % -log10(p_value)
return oddsratio, p_value
|
// ... existing code ...
from GeneSetCollection import GeneSetCollect
def enrichment(gene_list, gene_set, background, alternative="two-sided", verbose=True):
"""Gene set enrichment analysis by Fisher Exact Test.
gene_list : query gene list
gene_set : predefined gene set
background : background gene set
alternative: {'two-sided', 'less', 'greater'}, optional
verbose : print results or not
Return: odds ratio (prior), p-value.
See http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.fisher_exact.html
To-do: to support a whole genome for default background?
"""
from scipy.stats import fisher_exact
from math import log10
L = set(gene_list) & set(background)
S = set(gene_set) & set(background)
a = len(L & S)
b = len(L) - a
c = len(S) - a
d = len(background) - (a + b + c)
oddsratio, p_value = fisher_exact([[a, b], [c, d]], alternative)
if verbose:
print "2x2 contingency table:"
print "\t%d\t%d" % (a, b)
print "\t%d\t%d" % (c, d)
print "odds ratio:\t%f" % oddsratio
print "%s P-val:\t%g" % (alternative, p_value)
print "-log(P-val):\t%f" % -log10(p_value)
return oddsratio, p_value
// ... rest of the code ...
|
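A tiny worked example of the 2x2 contingency table the enrichment function builds before calling scipy's Fisher exact test; the gene identifiers are placeholders.
from scipy.stats import fisher_exact

background = {"g%d" % i for i in range(10)}   # 10 hypothetical background genes
gene_list = {"g0", "g1", "g2"}                # query list
gene_set = {"g1", "g2", "g3", "g4"}           # predefined set

L = gene_list & background
S = gene_set & background
a = len(L & S)                       # in both query and set
b = len(L) - a                       # query only
c = len(S) - a                       # set only
d = len(background) - (a + b + c)    # neither
oddsratio, p_value = fisher_exact([[a, b], [c, d]], "two-sided")
print(a, b, c, d, oddsratio, p_value)  # table is 2,1 / 2,5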
6a2d68795e004ab0531a114c927f2898fac57433
|
src/main/java/net/ripe/db/whois/common/Message.java
|
src/main/java/net/ripe/db/whois/common/Message.java
|
package net.ripe.db.whois.common;
import javax.annotation.concurrent.Immutable;
import java.util.regex.Pattern;
@Immutable
public final class Message {
public static final Pattern BEGINNING_OF_LINE_PERCENT_SIGNS = Pattern.compile("^%+ ");
private final Messages.Type type;
private final String text;
private final Object[] args;
private final String formattedText;
public Message(final Messages.Type type, final String text, final Object... args) {
this.type = type;
this.text = text;
this.args = args;
this.formattedText = args.length == 0 ? text : String.format(text, args);
}
@Override
public String toString() {
return formattedText;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final Message message = (Message) o;
return type == message.type && formattedText.equals(message.formattedText);
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + formattedText.hashCode();
return result;
}
public Messages.Type getType() {
return type;
}
public String getFormattedText() {
return formattedText;
}
public String getText() {
return text;
}
public Object[] getArgs() {
return args;
}
}
|
package net.ripe.db.whois.common;
import javax.annotation.concurrent.Immutable;
@Immutable
public final class Message {
private final Messages.Type type;
private final String text;
private final Object[] args;
private final String formattedText;
public Message(final Messages.Type type, final String text, final Object... args) {
this.type = type;
this.text = text;
this.args = args;
this.formattedText = args.length == 0 ? text : String.format(text, args);
}
@Override
public String toString() {
return formattedText;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final Message message = (Message) o;
return type == message.type && formattedText.equals(message.formattedText);
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + formattedText.hashCode();
return result;
}
public Messages.Type getType() {
return type;
}
public String getFormattedText() {
return formattedText;
}
public String getText() {
return text;
}
public Object[] getArgs() {
return args;
}
}
|
Print all the messages on a QueryException (not just the length).
|
Print all the messages on a QueryException (not just the length).
|
Java
|
agpl-3.0
|
rpsl4j/rpsl4j-parser,rpsl4j/rpsl4j-parser,rpsl4j/rpsl4j-parser
|
java
|
## Code Before:
package net.ripe.db.whois.common;
import javax.annotation.concurrent.Immutable;
import java.util.regex.Pattern;
@Immutable
public final class Message {
public static final Pattern BEGINNING_OF_LINE_PERCENT_SIGNS = Pattern.compile("^%+ ");
private final Messages.Type type;
private final String text;
private final Object[] args;
private final String formattedText;
public Message(final Messages.Type type, final String text, final Object... args) {
this.type = type;
this.text = text;
this.args = args;
this.formattedText = args.length == 0 ? text : String.format(text, args);
}
@Override
public String toString() {
return formattedText;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final Message message = (Message) o;
return type == message.type && formattedText.equals(message.formattedText);
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + formattedText.hashCode();
return result;
}
public Messages.Type getType() {
return type;
}
public String getFormattedText() {
return formattedText;
}
public String getText() {
return text;
}
public Object[] getArgs() {
return args;
}
}
## Instruction:
Print all the messages on a QueryException (not just the length).
## Code After:
package net.ripe.db.whois.common;
import javax.annotation.concurrent.Immutable;
@Immutable
public final class Message {
private final Messages.Type type;
private final String text;
private final Object[] args;
private final String formattedText;
public Message(final Messages.Type type, final String text, final Object... args) {
this.type = type;
this.text = text;
this.args = args;
this.formattedText = args.length == 0 ? text : String.format(text, args);
}
@Override
public String toString() {
return formattedText;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final Message message = (Message) o;
return type == message.type && formattedText.equals(message.formattedText);
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + formattedText.hashCode();
return result;
}
public Messages.Type getType() {
return type;
}
public String getFormattedText() {
return formattedText;
}
public String getText() {
return text;
}
public Object[] getArgs() {
return args;
}
}
|
...
package net.ripe.db.whois.common;
import javax.annotation.concurrent.Immutable;
@Immutable
public final class Message {
private final Messages.Type type;
private final String text;
...
|
a2b4a2dcb9d2b009347a838c6f2c3895eb7e23b9
|
src/graphics/buffer_lock_manager.h
|
src/graphics/buffer_lock_manager.h
|
namespace Graphics
{
struct BufferRange
{
size_t startOffset;
size_t length;
size_t endOffset() const
{
return startOffset + length;
}
bool overlaps(const BufferRange &other) const
{
return startOffset < other.endOffset() && other.startOffset < endOffset();
}
};
struct BufferLock
{
BufferRange range;
GLsync syncObject;
};
/**
* \brief
*
*
*/
class BufferLockManager
{
public:
explicit BufferLockManager(bool runUpdatesOnCPU);
~BufferLockManager();
void initialize(Gl *gl);
void waitForLockedRange(size_t lockBeginBytes, size_t lockLength);
void lockRange(size_t lockBeginBytes, size_t lockLength);
private:
void wait(GLsync *syncObject);
void cleanup(BufferLock *bufferLock);
std::vector<BufferLock> bufferLocks;
bool runUpdatesOnCPU;
Gl *gl;
};
} // namespace Graphics
#endif // SRC_GRAPHICS_BUFFER_LOCK_MANAGER_H_
|
namespace Graphics
{
/**
* \brief Encapsulates a locked buffer range
*
* It stores the start offset in the buffer and the length of the range.
* The end offset can be retrieved with #endOffset().
*
* The method #overlaps() checks if the range overlaps with the
* given range.
*/
struct BufferRange
{
size_t startOffset;
size_t length;
size_t endOffset() const
{
return startOffset + length;
}
bool overlaps(const BufferRange &other) const
{
return startOffset < other.endOffset() && other.startOffset < endOffset();
}
};
/**
* \brief Lock on a buffer determined by \link BufferRange range \endlink and
* a sync object
*/
struct BufferLock
{
BufferRange range;
GLsync syncObject;
};
/**
* \brief Manages locks for a buffer
*
* Locks can be acquired with #lockRange() and #waitForLockedRange() waits until
* the range is free again.
*/
class BufferLockManager
{
public:
explicit BufferLockManager(bool runUpdatesOnCPU);
~BufferLockManager();
void initialize(Gl *gl);
void waitForLockedRange(size_t lockBeginBytes, size_t lockLength);
void lockRange(size_t lockBeginBytes, size_t lockLength);
private:
void wait(GLsync *syncObject);
void cleanup(BufferLock *bufferLock);
std::vector<BufferLock> bufferLocks;
bool runUpdatesOnCPU;
Gl *gl;
};
} // namespace Graphics
#endif // SRC_GRAPHICS_BUFFER_LOCK_MANAGER_H_
|
Add Documentation for BufferLockManager, BufferRange and BufferLock.
|
Add Documentation for BufferLockManager, BufferRange and BufferLock.
|
C
|
mit
|
Christof/voly-labeller,Christof/voly-labeller,Christof/voly-labeller,Christof/voly-labeller
|
c
|
## Code Before:
namespace Graphics
{
struct BufferRange
{
size_t startOffset;
size_t length;
size_t endOffset() const
{
return startOffset + length;
}
bool overlaps(const BufferRange &other) const
{
return startOffset < other.endOffset() && other.startOffset < endOffset();
}
};
struct BufferLock
{
BufferRange range;
GLsync syncObject;
};
/**
* \brief
*
*
*/
class BufferLockManager
{
public:
explicit BufferLockManager(bool runUpdatesOnCPU);
~BufferLockManager();
void initialize(Gl *gl);
void waitForLockedRange(size_t lockBeginBytes, size_t lockLength);
void lockRange(size_t lockBeginBytes, size_t lockLength);
private:
void wait(GLsync *syncObject);
void cleanup(BufferLock *bufferLock);
std::vector<BufferLock> bufferLocks;
bool runUpdatesOnCPU;
Gl *gl;
};
} // namespace Graphics
#endif // SRC_GRAPHICS_BUFFER_LOCK_MANAGER_H_
## Instruction:
Add Documentation for BufferLockManager, BufferRange and BufferLock.
## Code After:
namespace Graphics
{
/**
* \brief Encapsulates a locked buffer range
*
* It stores the start offset in the buffer and the length of the range.
* The end offset can be retrieved with #endOffset().
*
* The method #overlaps() checks if the range overlaps with the
* given range.
*/
struct BufferRange
{
size_t startOffset;
size_t length;
size_t endOffset() const
{
return startOffset + length;
}
bool overlaps(const BufferRange &other) const
{
return startOffset < other.endOffset() && other.startOffset < endOffset();
}
};
/**
* \brief Lock on a buffer determined by \link BufferRange range \endlink and
* a sync object
*/
struct BufferLock
{
BufferRange range;
GLsync syncObject;
};
/**
* \brief Manages locks for a buffer
*
* Locks can be acquired with #lockRange() and #waitForLockedRange() waits until
* the range is free again.
*/
class BufferLockManager
{
public:
explicit BufferLockManager(bool runUpdatesOnCPU);
~BufferLockManager();
void initialize(Gl *gl);
void waitForLockedRange(size_t lockBeginBytes, size_t lockLength);
void lockRange(size_t lockBeginBytes, size_t lockLength);
private:
void wait(GLsync *syncObject);
void cleanup(BufferLock *bufferLock);
std::vector<BufferLock> bufferLocks;
bool runUpdatesOnCPU;
Gl *gl;
};
} // namespace Graphics
#endif // SRC_GRAPHICS_BUFFER_LOCK_MANAGER_H_
|
// ... existing code ...
namespace Graphics
{
/**
* \brief Encapsulates a locked buffer range
*
* It stores the start offset in the buffer and the length of the range.
* The end offset can be retrieved with #endOffset().
*
* The method #overlaps() checks if the range overlaps with the
* given range.
*/
struct BufferRange
{
size_t startOffset;
// ... modified code ...
}
};
/**
* \brief Lock on a buffer determined by \link BufferRange range \endlink and
* a sync object
*/
struct BufferLock
{
BufferRange range;
...
};
/**
* \brief Manages locks for a buffer
*
* Locks can be acquired with #lockRange() and #waitForLockedRange() waits until
* the range is free again.
*/
class BufferLockManager
{
// ... rest of the code ...
|
fc09e847a5435581738a32f8aa158e7d03491b94
|
calico_containers/tests/st/test_container_to_host.py
|
calico_containers/tests/st/test_container_to_host.py
|
from subprocess import CalledProcessError
from test_base import TestBase
from tests.st.utils.docker_host import DockerHost
class TestContainerToHost(TestBase):
def test_container_to_host(self):
"""
Test that a container can ping the host. (Without using the docker
network driver, since it doesn't support that yet.)
This function is important for Mesos, since the containerized executor
needs to exchange messages with the Mesos Slave process on the host.
"""
with DockerHost('host', dind=False) as host:
host.calicoctl("profile add TEST")
# Use standard docker bridge networking.
node1 = host.create_workload("node1")
# Add the nodes to Calico networking.
host.calicoctl("container add %s 192.168.100.1" % node1)
# Get the endpoint IDs for the containers
ep1 = host.calicoctl("container %s endpoint-id show" % node1)
# Now add the profiles.
host.calicoctl("endpoint %s profile set TEST" % ep1)
# Check it works. Note that the profile allows all outgoing
# traffic by default, and conntrack should allow the reply.
node1.assert_can_ping(host.ip, retries=10)
# Test the teardown commands
host.calicoctl("profile remove TEST")
host.calicoctl("container remove %s" % node1)
host.calicoctl("pool remove 192.168.0.0/16")
host.calicoctl("node stop")
|
from subprocess import CalledProcessError
from test_base import TestBase
from tests.st.utils.docker_host import DockerHost
class TestContainerToHost(TestBase):
def test_container_to_host(self):
"""
Test that a container can ping the host.
This function is important for Mesos, since the containerized executor
needs to exchange messages with the Mesos Slave process on the host.
Note also that we do not use the Docker Network driver for this test.
The Docker Container Network Model defines a "network" as a group of
endpoints that can communicate with each other, but are isolated from
everything else. Thus, an endpoint of a Docker network should not be
able to ping the host.
"""
with DockerHost('host', dind=False) as host:
host.calicoctl("profile add TEST")
# Use standard docker bridge networking.
node1 = host.create_workload("node1")
# Add the nodes to Calico networking.
host.calicoctl("container add %s 192.168.100.1" % node1)
# Get the endpoint IDs for the containers
ep1 = host.calicoctl("container %s endpoint-id show" % node1)
# Now add the profiles.
host.calicoctl("endpoint %s profile set TEST" % ep1)
# Check it works. Note that the profile allows all outgoing
# traffic by default, and conntrack should allow the reply.
node1.assert_can_ping(host.ip, retries=10)
|
Clarify test_containers_to_host not using libnetwork
|
Clarify test_containers_to_host not using libnetwork
Former-commit-id: fbd7c3b5627ba288ac400944ee242f3369143291
|
Python
|
apache-2.0
|
plwhite/libcalico,TrimBiggs/libcalico,caseydavenport/libcalico,alexhersh/libcalico,insequent/libcalico,tomdee/libnetwork-plugin,projectcalico/libcalico,TrimBiggs/libnetwork-plugin,djosborne/libcalico,TrimBiggs/libnetwork-plugin,tomdee/libcalico,L-MA/libcalico,robbrockbank/libcalico,Symmetric/libcalico,projectcalico/libnetwork-plugin
|
python
|
## Code Before:
from subprocess import CalledProcessError
from test_base import TestBase
from tests.st.utils.docker_host import DockerHost
class TestContainerToHost(TestBase):
def test_container_to_host(self):
"""
Test that a container can ping the host. (Without using the docker
network driver, since it doesn't support that yet.)
This function is important for Mesos, since the containerized executor
needs to exchange messages with the Mesos Slave process on the host.
"""
with DockerHost('host', dind=False) as host:
host.calicoctl("profile add TEST")
# Use standard docker bridge networking.
node1 = host.create_workload("node1")
# Add the nodes to Calico networking.
host.calicoctl("container add %s 192.168.100.1" % node1)
# Get the endpoint IDs for the containers
ep1 = host.calicoctl("container %s endpoint-id show" % node1)
# Now add the profiles.
host.calicoctl("endpoint %s profile set TEST" % ep1)
# Check it works. Note that the profile allows all outgoing
# traffic by default, and conntrack should allow the reply.
node1.assert_can_ping(host.ip, retries=10)
# Test the teardown commands
host.calicoctl("profile remove TEST")
host.calicoctl("container remove %s" % node1)
host.calicoctl("pool remove 192.168.0.0/16")
host.calicoctl("node stop")
## Instruction:
Clarify test_containers_to_host not using libnetwork
Former-commit-id: fbd7c3b5627ba288ac400944ee242f3369143291
## Code After:
from subprocess import CalledProcessError
from test_base import TestBase
from tests.st.utils.docker_host import DockerHost
class TestContainerToHost(TestBase):
def test_container_to_host(self):
"""
Test that a container can ping the host.
This function is important for Mesos, since the containerized executor
needs to exchange messages with the Mesos Slave process on the host.
Note also that we do not use the Docker Network driver for this test.
The Docker Container Network Model defines a "network" as a group of
endpoints that can communicate with each other, but are isolated from
everything else. Thus, an endpoint of a Docker network should not be
able to ping the host.
"""
with DockerHost('host', dind=False) as host:
host.calicoctl("profile add TEST")
# Use standard docker bridge networking.
node1 = host.create_workload("node1")
# Add the nodes to Calico networking.
host.calicoctl("container add %s 192.168.100.1" % node1)
# Get the endpoint IDs for the containers
ep1 = host.calicoctl("container %s endpoint-id show" % node1)
# Now add the profiles.
host.calicoctl("endpoint %s profile set TEST" % ep1)
# Check it works. Note that the profile allows all outgoing
# traffic by default, and conntrack should allow the reply.
node1.assert_can_ping(host.ip, retries=10)
|
// ... existing code ...
class TestContainerToHost(TestBase):
def test_container_to_host(self):
"""
Test that a container can ping the host.
This function is important for Mesos, since the containerized executor
needs to exchange messages with the Mesos Slave process on the host.
Note also that we do not use the Docker Network driver for this test.
The Docker Container Network Model defines a "network" as a group of
endpoints that can communicate with each other, but are isolated from
everything else. Thus, an endpoint of a Docker network should not be
able to ping the host.
"""
with DockerHost('host', dind=False) as host:
host.calicoctl("profile add TEST")
// ... modified code ...
# Check it works. Note that the profile allows all outgoing
# traffic by default, and conntrack should allow the reply.
node1.assert_can_ping(host.ip, retries=10)
// ... rest of the code ...
|
81dfb5cb952fbca90882bd39e76887f0fa6479eb
|
msmexplorer/tests/test_msm_plot.py
|
msmexplorer/tests/test_msm_plot.py
|
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
|
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
|
Add test for implied timescales plot
|
Add test for implied timescales plot
|
Python
|
mit
|
msmexplorer/msmexplorer,msmexplorer/msmexplorer
|
python
|
## Code Before:
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
## Instruction:
Add test for implied timescales plot
## Code After:
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
|
// ... existing code ...
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
// ... modified code ...
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
// ... rest of the code ...
|
da59e27fe4176e23ec305f9a7143b288521db6ef
|
src/ggrc/converters/handlers/request.py
|
src/ggrc/converters/handlers/request.py
|
"""Handlers for request specific columns."""
from ggrc.converters.handlers import handlers
from ggrc.converters import errors
class RequestStatusColumnHandler(handlers.StatusColumnHandler):
"""Handler for request status."""
def parse_item(self):
"""Parse raw_value into a valid request status if possible."""
value = handlers.StatusColumnHandler.parse_item(self)
if value in {"Final", "Verified"}:
value = "In Progress"
self.add_warning(errors.REQUEST_INVALID_STATE)
return value
|
"""Handlers for request specific columns."""
from ggrc.converters.handlers import handlers
from ggrc.converters import errors
from ggrc import models
class RequestStatusColumnHandler(handlers.StatusColumnHandler):
"""Handler for request status."""
def parse_item(self):
"""Parse raw_value into a valid request status if possible."""
value = handlers.StatusColumnHandler.parse_item(self)
if value in models.Request.END_STATES:
value = models.Request.PROGRESS_STATE
self.add_warning(errors.REQUEST_INVALID_STATE)
return value
|
Update Request object import to use statusable attributes
|
Update Request object import to use statusable attributes
|
Python
|
apache-2.0
|
kr41/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core
|
python
|
## Code Before:
"""Handlers for request specific columns."""
from ggrc.converters.handlers import handlers
from ggrc.converters import errors
class RequestStatusColumnHandler(handlers.StatusColumnHandler):
"""Handler for request status."""
def parse_item(self):
"""Parse raw_value into a valid request status if possible."""
value = handlers.StatusColumnHandler.parse_item(self)
if value in {"Final", "Verified"}:
value = "In Progress"
self.add_warning(errors.REQUEST_INVALID_STATE)
return value
## Instruction:
Update Request object import to use statusable attributes
## Code After:
"""Handlers for request specific columns."""
from ggrc.converters.handlers import handlers
from ggrc.converters import errors
from ggrc import models
class RequestStatusColumnHandler(handlers.StatusColumnHandler):
"""Handler for request status."""
def parse_item(self):
"""Parse raw_value into a valid request status if possible."""
value = handlers.StatusColumnHandler.parse_item(self)
if value in models.Request.END_STATES:
value = models.Request.PROGRESS_STATE
self.add_warning(errors.REQUEST_INVALID_STATE)
return value
|
# ... existing code ...
from ggrc.converters.handlers import handlers
from ggrc.converters import errors
from ggrc import models
class RequestStatusColumnHandler(handlers.StatusColumnHandler):
# ... modified code ...
def parse_item(self):
"""Parse raw_value into a valid request status if possible."""
value = handlers.StatusColumnHandler.parse_item(self)
if value in models.Request.END_STATES:
value = models.Request.PROGRESS_STATE
self.add_warning(errors.REQUEST_INVALID_STATE)
return value
# ... rest of the code ...
|
31193e4a68582e1b495f8fbf9bdf7904ec44b304
|
src/kernel/task/resource/vfork/res_vfork.c
|
src/kernel/task/resource/vfork/res_vfork.c
|
/**
* @file res_vfork.c
* @brief Task resource for vfork
* @date May 16, 2014
* @author Anton Bondarev
*/
#include <stddef.h>
#include <setjmp.h>
#include <kernel/task.h>
#include <kernel/task/resource.h>
#include <kernel/task/resource/task_vfork.h>
TASK_RESOURCE_DEF(task_vfork_desc, struct task_vfork);
static void task_vfork_init(const struct task *task, void *vfork_buff) {
task_vfork_end((struct task *)task);
}
static int task_vfork_inherit(const struct task *task,
const struct task *parent) {
return 0;
}
static size_t task_vfork_offset;
static const struct task_resource_desc task_vfork_desc = {
.init = task_vfork_init,
.inherit = task_vfork_inherit,
.resource_size = sizeof(struct task_vfork),
.resource_offset = &task_vfork_offset
};
struct task_vfork *task_resource_vfork(const struct task *task) {
assert(task != NULL);
return (void *)task->resources + task_vfork_offset;
}
|
/**
* @file res_vfork.c
* @brief Task resource for vfork
* @date May 16, 2014
* @author Anton Bondarev
*/
#include <stddef.h>
#include <setjmp.h>
#include <util/binalign.h>
#include <kernel/task.h>
#include <kernel/task/resource.h>
#include <kernel/task/resource/task_vfork.h>
TASK_RESOURCE_DEF(task_vfork_desc, struct task_vfork);
static void task_vfork_init(const struct task *task, void *vfork_buff) {
task_vfork_end((struct task *)task);
}
static int task_vfork_inherit(const struct task *task,
const struct task *parent) {
return 0;
}
static size_t task_vfork_offset;
static const struct task_resource_desc task_vfork_desc = {
.init = task_vfork_init,
.inherit = task_vfork_inherit,
.resource_size = sizeof(struct task_vfork),
.resource_offset = &task_vfork_offset,
};
struct task_vfork *task_resource_vfork(const struct task *task) {
size_t offset;
assert(task != NULL);
offset = (size_t)((void *)task->resources + task_vfork_offset);
#ifdef PT_REGS_ALIGN
return (void *)binalign_bound(offset, PT_REGS_ALIGN);
#else
return (void *)offset;
#endif
}
|
Add alignment for 'vfork' task resource
|
kernel: Add alignment for 'vfork' task resource
|
C
|
bsd-2-clause
|
embox/embox,embox/embox,embox/embox,embox/embox,embox/embox,embox/embox
|
c
|
## Code Before:
/**
* @file res_vfork.c
* @brief Task resource for vfork
* @date May 16, 2014
* @author Anton Bondarev
*/
#include <stddef.h>
#include <setjmp.h>
#include <kernel/task.h>
#include <kernel/task/resource.h>
#include <kernel/task/resource/task_vfork.h>
TASK_RESOURCE_DEF(task_vfork_desc, struct task_vfork);
static void task_vfork_init(const struct task *task, void *vfork_buff) {
task_vfork_end((struct task *)task);
}
static int task_vfork_inherit(const struct task *task,
const struct task *parent) {
return 0;
}
static size_t task_vfork_offset;
static const struct task_resource_desc task_vfork_desc = {
.init = task_vfork_init,
.inherit = task_vfork_inherit,
.resource_size = sizeof(struct task_vfork),
.resource_offset = &task_vfork_offset
};
struct task_vfork *task_resource_vfork(const struct task *task) {
assert(task != NULL);
return (void *)task->resources + task_vfork_offset;
}
## Instruction:
kernel: Add alignment for 'vfork' task resource
## Code After:
/**
* @file res_vfork.c
* @brief Task resource for vfork
* @date May 16, 2014
* @author Anton Bondarev
*/
#include <stddef.h>
#include <setjmp.h>
#include <util/binalign.h>
#include <kernel/task.h>
#include <kernel/task/resource.h>
#include <kernel/task/resource/task_vfork.h>
TASK_RESOURCE_DEF(task_vfork_desc, struct task_vfork);
static void task_vfork_init(const struct task *task, void *vfork_buff) {
task_vfork_end((struct task *)task);
}
static int task_vfork_inherit(const struct task *task,
const struct task *parent) {
return 0;
}
static size_t task_vfork_offset;
static const struct task_resource_desc task_vfork_desc = {
.init = task_vfork_init,
.inherit = task_vfork_inherit,
.resource_size = sizeof(struct task_vfork),
.resource_offset = &task_vfork_offset,
};
struct task_vfork *task_resource_vfork(const struct task *task) {
size_t offset;
assert(task != NULL);
offset = (size_t)((void *)task->resources + task_vfork_offset);
#ifdef PT_REGS_ALIGN
return (void *)binalign_bound(offset, PT_REGS_ALIGN);
#else
return (void *)offset;
#endif
}
|
// ... existing code ...
#include <stddef.h>
#include <setjmp.h>
#include <util/binalign.h>
#include <kernel/task.h>
#include <kernel/task/resource.h>
// ... modified code ...
.init = task_vfork_init,
.inherit = task_vfork_inherit,
.resource_size = sizeof(struct task_vfork),
.resource_offset = &task_vfork_offset,
};
struct task_vfork *task_resource_vfork(const struct task *task) {
size_t offset;
assert(task != NULL);
offset = (size_t)((void *)task->resources + task_vfork_offset);
#ifdef PT_REGS_ALIGN
return (void *)binalign_bound(offset, PT_REGS_ALIGN);
#else
return (void *)offset;
#endif
}
// ... rest of the code ...
|
c843186e2d38fa8bfc184da4e080a2377bcde3b0
|
dashbuilder-showcase/dashbuilder-webapp/src/main/java/org/dashbuilder/client/perspectives/MainPerspective.java
|
dashbuilder-showcase/dashbuilder-webapp/src/main/java/org/dashbuilder/client/perspectives/MainPerspective.java
|
package org.dashbuilder.client.perspectives;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import org.uberfire.client.annotations.Perspective;
import org.uberfire.client.annotations.WorkbenchPerspective;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import org.uberfire.workbench.model.PanelType;
import org.uberfire.workbench.model.PerspectiveDefinition;
import org.uberfire.workbench.model.impl.PartDefinitionImpl;
import org.uberfire.workbench.model.impl.PerspectiveDefinitionImpl;
/**
* A Perspective to show File Explorer
*/
@ApplicationScoped
@WorkbenchPerspective(identifier = "MainPerspective", isDefault = true)
public class MainPerspective {
private PerspectiveDefinition perspective;
@PostConstruct
public void init() {
buildPerspective();
}
@Perspective
public PerspectiveDefinition getPerspective() {
return this.perspective;
}
public PerspectiveDefinition buildPerspective() {
perspective = new PerspectiveDefinitionImpl( PanelType.ROOT_STATIC);
perspective.setTransient(true);
perspective.setName("MainPerspective");
//perspective.getRoot().addPart(new PartDefinitionImpl(new DefaultPlaceRequest("HomeScreen")));
perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "GalleryScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "StaticChartScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "DisplayerScreen" ).addParameter("uuid", "opps-country-summary") ) );
return perspective;
}
}
|
package org.dashbuilder.client.perspectives;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import org.uberfire.client.annotations.Perspective;
import org.uberfire.client.annotations.WorkbenchPerspective;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import org.uberfire.workbench.model.PanelType;
import org.uberfire.workbench.model.PerspectiveDefinition;
import org.uberfire.workbench.model.impl.PartDefinitionImpl;
import org.uberfire.workbench.model.impl.PerspectiveDefinitionImpl;
/**
* A Perspective to show File Explorer
*/
@ApplicationScoped
@WorkbenchPerspective(identifier = "MainPerspective", isDefault = true)
public class MainPerspective {
@Perspective
public PerspectiveDefinition buildPerspective() {
PerspectiveDefinition perspective = new PerspectiveDefinitionImpl( PanelType.ROOT_STATIC);
perspective.setTransient(true);
perspective.setName("MainPerspective");
//perspective.getRoot().addPart(new PartDefinitionImpl(new DefaultPlaceRequest("HomeScreen")));
perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "GalleryScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "StaticChartScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "DisplayerScreen" ).addParameter("uuid", "opps-country-summary") ) );
return perspective;
}
}
|
Fix perspective initialization issue when clicking on top menu navbar
|
Fix perspective initialization issue when clicking on top menu navbar
|
Java
|
apache-2.0
|
dashbuilder/dashbuilder,jhrcek/dashbuilder,jhrcek/dashbuilder,fariagu/dashbuilder,porcelli-forks/dashbuilder,jhrcek/dashbuilder,dgutierr/dashbuilder,psiroky/dashbuilder,romartin/dashbuilder,Salaboy/dashbuilder,fariagu/dashbuilder,Salaboy/dashbuilder,fariagu/dashbuilder,dgutierr/dashbuilder,romartin/dashbuilder,dgutierr/dashbuilder,porcelli-forks/dashbuilder,cristianonicolai/dashbuilder,mbiarnes/dashbuilder,cristianonicolai/dashbuilder,psiroky/dashbuilder,mbiarnes/dashbuilder,dashbuilder/dashbuilder,dashbuilder/dashbuilder,romartin/dashbuilder,ryanzhang/dashbuilder,mbiarnes/dashbuilder
|
java
|
## Code Before:
package org.dashbuilder.client.perspectives;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import org.uberfire.client.annotations.Perspective;
import org.uberfire.client.annotations.WorkbenchPerspective;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import org.uberfire.workbench.model.PanelType;
import org.uberfire.workbench.model.PerspectiveDefinition;
import org.uberfire.workbench.model.impl.PartDefinitionImpl;
import org.uberfire.workbench.model.impl.PerspectiveDefinitionImpl;
/**
* A Perspective to show File Explorer
*/
@ApplicationScoped
@WorkbenchPerspective(identifier = "MainPerspective", isDefault = true)
public class MainPerspective {
private PerspectiveDefinition perspective;
@PostConstruct
public void init() {
buildPerspective();
}
@Perspective
public PerspectiveDefinition getPerspective() {
return this.perspective;
}
public PerspectiveDefinition buildPerspective() {
perspective = new PerspectiveDefinitionImpl( PanelType.ROOT_STATIC);
perspective.setTransient(true);
perspective.setName("MainPerspective");
//perspective.getRoot().addPart(new PartDefinitionImpl(new DefaultPlaceRequest("HomeScreen")));
perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "GalleryScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "StaticChartScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "DisplayerScreen" ).addParameter("uuid", "opps-country-summary") ) );
return perspective;
}
}
## Instruction:
Fix perspective initialization issue when clicking on top menu navbar
## Code After:
package org.dashbuilder.client.perspectives;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import org.uberfire.client.annotations.Perspective;
import org.uberfire.client.annotations.WorkbenchPerspective;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import org.uberfire.workbench.model.PanelType;
import org.uberfire.workbench.model.PerspectiveDefinition;
import org.uberfire.workbench.model.impl.PartDefinitionImpl;
import org.uberfire.workbench.model.impl.PerspectiveDefinitionImpl;
/**
* A Perspective to show File Explorer
*/
@ApplicationScoped
@WorkbenchPerspective(identifier = "MainPerspective", isDefault = true)
public class MainPerspective {
@Perspective
public PerspectiveDefinition buildPerspective() {
PerspectiveDefinition perspective = new PerspectiveDefinitionImpl( PanelType.ROOT_STATIC);
perspective.setTransient(true);
perspective.setName("MainPerspective");
//perspective.getRoot().addPart(new PartDefinitionImpl(new DefaultPlaceRequest("HomeScreen")));
perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "GalleryScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "StaticChartScreen" ) ) );
//perspective.getRoot().addPart( new PartDefinitionImpl( new DefaultPlaceRequest( "DisplayerScreen" ).addParameter("uuid", "opps-country-summary") ) );
return perspective;
}
}
|
...
@WorkbenchPerspective(identifier = "MainPerspective", isDefault = true)
public class MainPerspective {
@Perspective
public PerspectiveDefinition buildPerspective() {
PerspectiveDefinition perspective = new PerspectiveDefinitionImpl( PanelType.ROOT_STATIC);
perspective.setTransient(true);
perspective.setName("MainPerspective");
//perspective.getRoot().addPart(new PartDefinitionImpl(new DefaultPlaceRequest("HomeScreen")));
...
|
d2dd941e9f7121b099877a9e31ab03aef8a54ac9
|
platform/platform-api/src/com/intellij/execution/configurations/LowPriorityProcessRunner.kt
|
platform/platform-api/src/com/intellij/execution/configurations/LowPriorityProcessRunner.kt
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.configurations
import com.intellij.openapi.util.SystemInfo
import com.intellij.openapi.util.registry.Registry
import java.io.File
/**
* @author yole
*/
fun setupLowPriorityExecution(commandLine: GeneralCommandLine, executablePath: String) {
if (!canRunLowPriority()) {
commandLine.exePath = executablePath
}
else {
if (SystemInfo.isWindows) {
// TODO
}
else {
commandLine.exePath = "/usr/bin/nice"
commandLine.addParameter("-n")
commandLine.addParameter("10")
commandLine.addParameter(executablePath)
}
}
}
private fun canRunLowPriority(): Boolean {
if (!Registry.`is`("ide.allow.low.priority.process")) {
return false
}
if (SystemInfo.isWindows) {
return false
}
else {
if (!niceExists) return false
}
return true
}
private val niceExists by lazy { File("/usr/bin/nice").exists() }
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.configurations
import com.intellij.openapi.util.SystemInfo
import com.intellij.openapi.util.registry.Registry
import java.io.File
/**
* @author yole
*/
fun setupLowPriorityExecution(commandLine: GeneralCommandLine, executablePath: String) {
if (!canRunLowPriority()) {
commandLine.exePath = executablePath
}
else {
if (SystemInfo.isWindows) {
commandLine.exePath = "cmd"
commandLine.addParameters("/c", "start", "/b", "/low", "/wait", GeneralCommandLine.inescapableQuote(""), executablePath)
}
else {
commandLine.exePath = "/usr/bin/nice"
commandLine.addParameters("-n", "10", executablePath)
}
}
}
private fun canRunLowPriority(): Boolean {
if (!Registry.`is`("ide.allow.low.priority.process")) {
return false
}
if (!SystemInfo.isWindows && !niceExists) {
return false
}
return true
}
private val niceExists by lazy { File("/usr/bin/nice").exists() }
|
Support low priority processes on Windows
|
Support low priority processes on Windows
|
Kotlin
|
apache-2.0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
kotlin
|
## Code Before:
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.configurations
import com.intellij.openapi.util.SystemInfo
import com.intellij.openapi.util.registry.Registry
import java.io.File
/**
* @author yole
*/
fun setupLowPriorityExecution(commandLine: GeneralCommandLine, executablePath: String) {
if (!canRunLowPriority()) {
commandLine.exePath = executablePath
}
else {
if (SystemInfo.isWindows) {
// TODO
}
else {
commandLine.exePath = "/usr/bin/nice"
commandLine.addParameter("-n")
commandLine.addParameter("10")
commandLine.addParameter(executablePath)
}
}
}
private fun canRunLowPriority(): Boolean {
if (!Registry.`is`("ide.allow.low.priority.process")) {
return false
}
if (SystemInfo.isWindows) {
return false
}
else {
if (!niceExists) return false
}
return true
}
private val niceExists by lazy { File("/usr/bin/nice").exists() }
## Instruction:
Support low priority processes on Windows
## Code After:
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.configurations
import com.intellij.openapi.util.SystemInfo
import com.intellij.openapi.util.registry.Registry
import java.io.File
/**
* @author yole
*/
fun setupLowPriorityExecution(commandLine: GeneralCommandLine, executablePath: String) {
if (!canRunLowPriority()) {
commandLine.exePath = executablePath
}
else {
if (SystemInfo.isWindows) {
commandLine.exePath = "cmd"
commandLine.addParameters("/c", "start", "/b", "/low", "/wait", GeneralCommandLine.inescapableQuote(""), executablePath)
}
else {
commandLine.exePath = "/usr/bin/nice"
commandLine.addParameters("-n", "10", executablePath)
}
}
}
private fun canRunLowPriority(): Boolean {
if (!Registry.`is`("ide.allow.low.priority.process")) {
return false
}
if (!SystemInfo.isWindows && !niceExists) {
return false
}
return true
}
private val niceExists by lazy { File("/usr/bin/nice").exists() }
|
...
}
else {
if (SystemInfo.isWindows) {
commandLine.exePath = "cmd"
commandLine.addParameters("/c", "start", "/b", "/low", "/wait", GeneralCommandLine.inescapableQuote(""), executablePath)
}
else {
commandLine.exePath = "/usr/bin/nice"
commandLine.addParameters("-n", "10", executablePath)
}
}
}
...
if (!Registry.`is`("ide.allow.low.priority.process")) {
return false
}
if (!SystemInfo.isWindows && !niceExists) {
return false
}
return true
}
...
|
34a5492f40daf81df3c416878fdb2c898416a6b8
|
extensions/jaeger/runtime/src/main/java/io/quarkus/jaeger/runtime/graal/Target_LoggingReporter.java
|
extensions/jaeger/runtime/src/main/java/io/quarkus/jaeger/runtime/graal/Target_LoggingReporter.java
|
package io.quarkus.jaeger.runtime.graal;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
import io.jaegertracing.internal.JaegerSpan;
import io.jaegertracing.spi.Reporter;
@Substitute
@TargetClass(className = "io.jaegertracing.internal.reporters.LoggingReporter")
final public class Target_LoggingReporter implements Reporter {
@Substitute
public Target_LoggingReporter() {
}
@Substitute
@Override
public void report(JaegerSpan span) {
System.err.println("--- not logging: " + span);
}
@Substitute
@Override
public void close() {
}
}
|
package io.quarkus.jaeger.runtime.graal;
import org.jboss.logging.Logger;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
import io.jaegertracing.internal.JaegerSpan;
import io.jaegertracing.spi.Reporter;
@Substitute
@TargetClass(className = "io.jaegertracing.internal.reporters.LoggingReporter")
final public class Target_LoggingReporter implements Reporter {
private static final Logger LOG = Logger.getLogger(Target_LoggingReporter.class);
@Substitute
public Target_LoggingReporter() {
}
@Substitute
@Override
public void report(JaegerSpan span) {
LOG.infof("Span reported: %s", span);
}
@Substitute
@Override
public void close() {
}
}
|
Update Jaeger reporter to use logger
|
Update Jaeger reporter to use logger
Update Jaeger reporter to use proper log instead of println statement
|
Java
|
apache-2.0
|
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
|
java
|
## Code Before:
package io.quarkus.jaeger.runtime.graal;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
import io.jaegertracing.internal.JaegerSpan;
import io.jaegertracing.spi.Reporter;
@Substitute
@TargetClass(className = "io.jaegertracing.internal.reporters.LoggingReporter")
final public class Target_LoggingReporter implements Reporter {
@Substitute
public Target_LoggingReporter() {
}
@Substitute
@Override
public void report(JaegerSpan span) {
System.err.println("--- not logging: " + span);
}
@Substitute
@Override
public void close() {
}
}
## Instruction:
Update Jaeger reporter to use logger
Update Jaeger reporter to use proper log instead of println statement
## Code After:
package io.quarkus.jaeger.runtime.graal;
import org.jboss.logging.Logger;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
import io.jaegertracing.internal.JaegerSpan;
import io.jaegertracing.spi.Reporter;
@Substitute
@TargetClass(className = "io.jaegertracing.internal.reporters.LoggingReporter")
final public class Target_LoggingReporter implements Reporter {
private static final Logger LOG = Logger.getLogger(Target_LoggingReporter.class);
@Substitute
public Target_LoggingReporter() {
}
@Substitute
@Override
public void report(JaegerSpan span) {
LOG.infof("Span reported: %s", span);
}
@Substitute
@Override
public void close() {
}
}
|
// ... existing code ...
package io.quarkus.jaeger.runtime.graal;
import org.jboss.logging.Logger;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
// ... modified code ...
@TargetClass(className = "io.jaegertracing.internal.reporters.LoggingReporter")
final public class Target_LoggingReporter implements Reporter {
private static final Logger LOG = Logger.getLogger(Target_LoggingReporter.class);
@Substitute
public Target_LoggingReporter() {
...
@Substitute
@Override
public void report(JaegerSpan span) {
LOG.infof("Span reported: %s", span);
}
@Substitute
// ... rest of the code ...
|
e29b1f6243fb7f9d2322b80573617ff9a0582d01
|
pinax/blog/parsers/markdown_parser.py
|
pinax/blog/parsers/markdown_parser.py
|
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
from ..models import Image
class ImageLookupImagePattern(ImagePattern):
def sanitize_url(self, url):
if url.startswith("http"):
return url
else:
try:
image = Image.objects.get(pk=int(url))
return image.image_path.url
except Image.DoesNotExist:
pass
except ValueError:
return url
return ""
def parse(text):
md = Markdown(extensions=["codehilite"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
|
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
from ..models import Image
class ImageLookupImagePattern(ImagePattern):
def sanitize_url(self, url):
if url.startswith("http"):
return url
else:
try:
image = Image.objects.get(pk=int(url))
return image.image_path.url
except Image.DoesNotExist:
pass
except ValueError:
return url
return ""
def parse(text):
md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
|
Add some extensions to the markdown parser
|
Add some extensions to the markdown parser
Ultimately we should make this a setting or hookset so it could be overridden at the site level.
|
Python
|
mit
|
swilcox/pinax-blog,pinax/pinax-blog,miurahr/pinax-blog,miurahr/pinax-blog,swilcox/pinax-blog,easton402/pinax-blog,pinax/pinax-blog,pinax/pinax-blog,easton402/pinax-blog
|
python
|
## Code Before:
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
from ..models import Image
class ImageLookupImagePattern(ImagePattern):
def sanitize_url(self, url):
if url.startswith("http"):
return url
else:
try:
image = Image.objects.get(pk=int(url))
return image.image_path.url
except Image.DoesNotExist:
pass
except ValueError:
return url
return ""
def parse(text):
md = Markdown(extensions=["codehilite"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
## Instruction:
Add some extensions to the markdown parser
Ultimately we should make this a setting or hookset so it could be overridden at the site level.
## Code After:
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
from ..models import Image
class ImageLookupImagePattern(ImagePattern):
def sanitize_url(self, url):
if url.startswith("http"):
return url
else:
try:
image = Image.objects.get(pk=int(url))
return image.image_path.url
except Image.DoesNotExist:
pass
except ValueError:
return url
return ""
def parse(text):
md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
|
# ... existing code ...
def parse(text):
md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
# ... rest of the code ...
|
0c6825ce8b5d1e3b15505c5bcac847c6a57a782e
|
statirator/main.py
|
statirator/main.py
|
import argparse
from . import commands
VALID_ARGS = ('init', 'compile', 'serve')
def create_options():
"Add options to tornado"
parser = argparse.ArgumentParser(
'Staitrator - Static multilingual site and blog generator')
parser.add_argument('command', choices=VALID_ARGS)
init = parser.add_argument_group('Init Options')
init.add_argument('-n', '--name', default='Default site',
help='Site name and title')
init.add_argument('-c', '--site_class', default='statirator.site.Html5Site',
help='The base class for the site')
init.add_argument('-s', '--source', default='source', help="Site's source directory")
init.add_argument('-b', '--build', default='build', help="Site's build directory")
return parser
def main():
parser = create_options()
args = parser.parse_args()
cmd = getattr(commands, args[0])
cmd(args)
if __name__ == '__main__':
main()
|
import argparse
from . import commands
def create_options():
"Add options to tornado"
parser = argparse.ArgumentParser(
'Staitrator - Static multilingual site and blog generator')
sub_parsers = parser.add_subparsers(help='Sub Commands help')
init = sub_parsers.add_parser('init', help='Initiate a new site')
init.add_argument('directory', help='Target directory')
init.add_argument('-n', '--name', default='Default site',
help='Site name and title [default: %(default)s]')
init.add_argument('-c', '--site_class', default='statirator.site.Html5Site',
help='The base class for the site [default: %(default)s]')
init.add_argument('-s', '--source', default='source',
help="Site's source directory [default: %(default)s]")
init.add_argument('-b', '--build', default='build',
help="Site's build directory [default: %(default)s]")
cmpl = sub_parsers.add_parser('compile', help='Compile the new site')
serve = sub_parsers.add_parser('serve', help='Serve the site, listening '
'on a local port')
return parser
def main():
parser = create_options()
args = parser.parse_args()
cmd = getattr(commands, args[0])
cmd(args)
if __name__ == '__main__':
main()
|
Move arguments to sub parsers
|
Move arguments to sub parsers
|
Python
|
mit
|
MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator
|
python
|
## Code Before:
import argparse
from . import commands
VALID_ARGS = ('init', 'compile', 'serve')
def create_options():
"Add options to tornado"
parser = argparse.ArgumentParser(
'Staitrator - Static multilingual site and blog generator')
parser.add_argument('command', choices=VALID_ARGS)
init = parser.add_argument_group('Init Options')
init.add_argument('-n', '--name', default='Default site',
help='Site name and title')
init.add_argument('-c', '--site_class', default='statirator.site.Html5Site',
help='The base class for the site')
init.add_argument('-s', '--source', default='source', help="Site's source directory")
init.add_argument('-b', '--build', default='build', help="Site's build directory")
return parser
def main():
parser = create_options()
args = parser.parse_args()
cmd = getattr(commands, args[0])
cmd(args)
if __name__ == '__main__':
main()
## Instruction:
Move arguments to sub parsers
## Code After:
import argparse
from . import commands
def create_options():
"Add options to tornado"
parser = argparse.ArgumentParser(
'Staitrator - Static multilingual site and blog generator')
sub_parsers = parser.add_subparsers(help='Sub Commands help')
init = sub_parsers.add_parser('init', help='Initiate a new site')
init.add_argument('directory', help='Target directory')
init.add_argument('-n', '--name', default='Default site',
help='Site name and title [default: %(default)s]')
init.add_argument('-c', '--site_class', default='statirator.site.Html5Site',
help='The base class for the site [default: %(default)s]')
init.add_argument('-s', '--source', default='source',
help="Site's source directory [default: %(default)s]")
init.add_argument('-b', '--build', default='build',
help="Site's build directory [default: %(default)s]")
cmpl = sub_parsers.add_parser('compile', help='Compile the new site')
serve = sub_parsers.add_parser('serve', help='Serve the site, listening '
'on a local port')
return parser
def main():
parser = create_options()
args = parser.parse_args()
cmd = getattr(commands, args[0])
cmd(args)
if __name__ == '__main__':
main()
|
// ... existing code ...
import argparse
from . import commands
def create_options():
"Add options to tornado"
// ... modified code ...
parser = argparse.ArgumentParser(
'Staitrator - Static multilingual site and blog generator')
sub_parsers = parser.add_subparsers(help='Sub Commands help')
init = sub_parsers.add_parser('init', help='Initiate a new site')
init.add_argument('directory', help='Target directory')
init.add_argument('-n', '--name', default='Default site',
help='Site name and title [default: %(default)s]')
init.add_argument('-c', '--site_class', default='statirator.site.Html5Site',
help='The base class for the site [default: %(default)s]')
init.add_argument('-s', '--source', default='source',
help="Site's source directory [default: %(default)s]")
init.add_argument('-b', '--build', default='build',
help="Site's build directory [default: %(default)s]")
cmpl = sub_parsers.add_parser('compile', help='Compile the new site')
serve = sub_parsers.add_parser('serve', help='Serve the site, listening '
'on a local port')
return parser
// ... rest of the code ...
|
df68b821807d25d204f43d7b1805da6c25f42b43
|
src/lib/pagination.py
|
src/lib/pagination.py
|
from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
|
from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
max_page_size = 100
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
|
Set a maximum to the number of elements that may be requested
|
Set a maximum to the number of elements that may be requested
|
Python
|
agpl-3.0
|
lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django
|
python
|
## Code Before:
from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
## Instruction:
Set a maximum to the number of elements that may be requested
## Code After:
from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
max_page_size = 100
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
|
# ... existing code ...
"""
page_size = 25
page_size_query_param = "max_results"
max_page_size = 100
def get_paginated_response(self, data):
links = OrderedDict()
# ... rest of the code ...
|
ba1bfc262e023a01d6e201d48d234640a443ed96
|
raven/__init__.py
|
raven/__init__.py
|
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from base import *
from conf import *
|
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from raven.base import *
from raven.conf import *
|
Use absolute imports, not relative ones.
|
Use absolute imports, not relative ones.
|
Python
|
bsd-3-clause
|
hzy/raven-python,akheron/raven-python,akalipetis/raven-python,nikolas/raven-python,arthurlogilab/raven-python,inspirehep/raven-python,recht/raven-python,akheron/raven-python,arthurlogilab/raven-python,arthurlogilab/raven-python,lepture/raven-python,percipient/raven-python,collective/mr.poe,Goldmund-Wyldebeast-Wunderliebe/raven-python,someonehan/raven-python,recht/raven-python,inspirehep/raven-python,jbarbuto/raven-python,johansteffner/raven-python,recht/raven-python,icereval/raven-python,lepture/raven-python,smarkets/raven-python,hzy/raven-python,arthurlogilab/raven-python,jmp0xf/raven-python,ronaldevers/raven-python,inspirehep/raven-python,ewdurbin/raven-python,jbarbuto/raven-python,nikolas/raven-python,jmagnusson/raven-python,akheron/raven-python,nikolas/raven-python,Photonomie/raven-python,dbravender/raven-python,akalipetis/raven-python,getsentry/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,smarkets/raven-python,ronaldevers/raven-python,danriti/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,percipient/raven-python,jmagnusson/raven-python,someonehan/raven-python,jmp0xf/raven-python,lepture/raven-python,danriti/raven-python,smarkets/raven-python,smarkets/raven-python,danriti/raven-python,someonehan/raven-python,getsentry/raven-python,jmp0xf/raven-python,hzy/raven-python,icereval/raven-python,getsentry/raven-python,nikolas/raven-python,percipient/raven-python,inspirehep/raven-python,jbarbuto/raven-python,jmagnusson/raven-python,akalipetis/raven-python,dbravender/raven-python,Photonomie/raven-python,dbravender/raven-python,ewdurbin/raven-python,ronaldevers/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jbarbuto/raven-python,johansteffner/raven-python,johansteffner/raven-python,icereval/raven-python,Photonomie/raven-python,icereval/raven-python,ewdurbin/raven-python
|
python
|
## Code Before:
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from base import *
from conf import *
## Instruction:
Use absolute imports, not relative ones.
## Code After:
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from raven.base import *
from raven.conf import *
|
# ... existing code ...
except Exception, e:
VERSION = 'unknown'
from raven.base import *
from raven.conf import *
# ... rest of the code ...
|
608e0348f4a9c8b91e0c1237b6c0ed6893d909b0
|
src/main/java/net/ihiroky/niotty/buffer/EncodeBufferGroup.java
|
src/main/java/net/ihiroky/niotty/buffer/EncodeBufferGroup.java
|
package net.ihiroky.niotty.buffer;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
/**
* @author Hiroki Itoh
*/
public class EncodeBufferGroup implements Iterable<EncodeBuffer> {
private Deque<EncodeBuffer> group = new ArrayDeque<>();
public void addLast(EncodeBuffer encodeBuffer) {
group.addLast(encodeBuffer);
}
public void addFirst(EncodeBuffer encodeBuffer) {
group.addFirst(encodeBuffer);
}
public EncodeBuffer pollFirst() {
return group.pollFirst();
}
public EncodeBuffer pollLast() {
return group.pollLast();
}
@Override
public Iterator<EncodeBuffer> iterator() {
return group.iterator();
}
}
|
package net.ihiroky.niotty.buffer;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
/**
* @author Hiroki Itoh
*/
public class EncodeBufferGroup implements Iterable<EncodeBuffer> {
private Deque<EncodeBuffer> group = new ArrayDeque<>();
public void addLast(EncodeBuffer encodeBuffer) {
group.addLast(encodeBuffer);
}
public void addFirst(EncodeBuffer encodeBuffer) {
group.addFirst(encodeBuffer);
}
public EncodeBuffer pollFirst() {
return group.pollFirst();
}
public EncodeBuffer pollLast() {
return group.pollLast();
}
@Override
public Iterator<EncodeBuffer> iterator() {
return group.iterator();
}
public int filledBytes() {
int sum = 0;
for (EncodeBuffer encodeBuffer : group) {
sum += encodeBuffer.filledBytes();
}
return sum;
}
}
|
Add method to count filled bytes
|
Add method to count filled bytes
Calculate the sum of element's filledBytes().
|
Java
|
mit
|
ihiroky/niotty
|
java
|
## Code Before:
package net.ihiroky.niotty.buffer;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
/**
* @author Hiroki Itoh
*/
public class EncodeBufferGroup implements Iterable<EncodeBuffer> {
private Deque<EncodeBuffer> group = new ArrayDeque<>();
public void addLast(EncodeBuffer encodeBuffer) {
group.addLast(encodeBuffer);
}
public void addFirst(EncodeBuffer encodeBuffer) {
group.addFirst(encodeBuffer);
}
public EncodeBuffer pollFirst() {
return group.pollFirst();
}
public EncodeBuffer pollLast() {
return group.pollLast();
}
@Override
public Iterator<EncodeBuffer> iterator() {
return group.iterator();
}
}
## Instruction:
Add method to count filled bytes
Calculate the sum of element's filledBytes().
## Code After:
package net.ihiroky.niotty.buffer;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
/**
* @author Hiroki Itoh
*/
public class EncodeBufferGroup implements Iterable<EncodeBuffer> {
private Deque<EncodeBuffer> group = new ArrayDeque<>();
public void addLast(EncodeBuffer encodeBuffer) {
group.addLast(encodeBuffer);
}
public void addFirst(EncodeBuffer encodeBuffer) {
group.addFirst(encodeBuffer);
}
public EncodeBuffer pollFirst() {
return group.pollFirst();
}
public EncodeBuffer pollLast() {
return group.pollLast();
}
@Override
public Iterator<EncodeBuffer> iterator() {
return group.iterator();
}
public int filledBytes() {
int sum = 0;
for (EncodeBuffer encodeBuffer : group) {
sum += encodeBuffer.filledBytes();
}
return sum;
}
}
|
...
public Iterator<EncodeBuffer> iterator() {
return group.iterator();
}
public int filledBytes() {
int sum = 0;
for (EncodeBuffer encodeBuffer : group) {
sum += encodeBuffer.filledBytes();
}
return sum;
}
}
...
|
ee1c890df7c2c86192b68bd442e41226f70a3850
|
setup.py
|
setup.py
|
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='[email protected]',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='[email protected]',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Remove the obsolete comment, library is licensed under BSD.
|
Remove the obsolete comment, library is licensed under BSD.
|
Python
|
bsd-3-clause
|
Liuftvafas/python-face-client,Kami/python-face-client
|
python
|
## Code Before:
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='[email protected]',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
## Instruction:
Remove the obsolete comment, library is licensed under BSD.
## Code After:
import os
import re
from distutils.core import setup
version_re = re.compile(
r'__version__ = (\(.*?\))')
cwd = os.path.dirname(os.path.abspath(__file__))
fp = open(os.path.join(cwd, 'face_client', '__init__.py'))
version = None
for line in fp:
match = version_re.search(line)
if match:
version = eval(match.group(1))
break
else:
raise Exception('Cannot find version in __init__.py')
fp.close()
setup(name='face_client',
version='.' . join(map(str, version)),
description='face.com face recognition Python API client library',
author='Tomaž Muraus',
author_email='[email protected]',
license='BSD',
url='http://github.com/Kami/python-face-client',
download_url='http://github.com/Kami/python-face-client/',
packages=['face_client'],
provides=['face_client'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
// ... existing code ...
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Security',
// ... rest of the code ...
|