| Column | Type | Length range / classes |
|---|---|---|
| commit | stringlengths | 40 to 40 |
| old_file | stringlengths | 4 to 234 |
| new_file | stringlengths | 4 to 234 |
| old_contents | stringlengths | 10 to 3.01k |
| new_contents | stringlengths | 19 to 3.38k |
| subject | stringlengths | 16 to 736 |
| message | stringlengths | 17 to 2.63k |
| lang | stringclasses | 4 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 to 82.6k |
| config | stringclasses | 4 values |
| content | stringlengths | 134 to 4.41k |
| fuzzy_diff | stringlengths | 29 to 3.44k |
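Each record below follows this schema: a commit hash, the file path before and after the change, the full file contents before and after, the commit subject and message, language and license metadata, the repositories containing the file, a `content` field that restates the before/after code around the instruction, and an abbreviated `fuzzy_diff` of the change. As a rough sketch only, assuming the dump corresponds to a Hugging Face-style dataset (the dataset identifier and split name below are placeholders, not taken from this dump), the rows could be read like this:

```python
from datasets import load_dataset

# "example/commit-instruct" and "train" are hypothetical placeholders;
# the real dataset id and split are not given in this dump.
ds = load_dataset("example/commit-instruct", split="train")

for row in ds:
    # Each row pairs a commit's old/new file contents with the commit
    # subject used as an edit instruction, plus a fuzzy diff of the change.
    print(row["commit"], row["old_file"], row["lang"])
    print(row["subject"])           # e.g. "Add ppc64le tarball rc force builder"
    print(row["fuzzy_diff"][:200])  # diff excerpt with "... existing code ..." markers
```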
dcc5c7be6f8463f41e1d1697bdba7fd576382259
master/rc_force.py
master/rc_force.py
rc_scheduler = ForceScheduler( name="rc build", builderNames=["package_osx10.9-x64", "package_win6.2-x64", "package_win6.2-x86", "package_tarball64", "package_tarball32", "package_tarballarm"], reason=FixedParameter(name="reason", default=""), branch=FixedParameter(name="branch", default=""), repository=FixedParameter(name="repository", default=""), project=FixedParameter(name="project", default="Packaging"), properties=[ ] ) c['schedulers'].append(rc_scheduler)
rc_scheduler = ForceScheduler( name="rc build", builderNames=["package_osx10.9-x64", "package_win6.2-x64", "package_win6.2-x86", "package_tarball64", "package_tarball32", "package_tarballarm", "package_tarballppc64le"], reason=FixedParameter(name="reason", default=""), branch=FixedParameter(name="branch", default=""), repository=FixedParameter(name="repository", default=""), project=FixedParameter(name="project", default="Packaging"), properties=[ ] ) c['schedulers'].append(rc_scheduler)
Add ppc64le tarball rc force builder
Add ppc64le tarball rc force builder
Python
mit
staticfloat/julia-buildbot,staticfloat/julia-buildbot
python
## Code Before: rc_scheduler = ForceScheduler( name="rc build", builderNames=["package_osx10.9-x64", "package_win6.2-x64", "package_win6.2-x86", "package_tarball64", "package_tarball32", "package_tarballarm"], reason=FixedParameter(name="reason", default=""), branch=FixedParameter(name="branch", default=""), repository=FixedParameter(name="repository", default=""), project=FixedParameter(name="project", default="Packaging"), properties=[ ] ) c['schedulers'].append(rc_scheduler) ## Instruction: Add ppc64le tarball rc force builder ## Code After: rc_scheduler = ForceScheduler( name="rc build", builderNames=["package_osx10.9-x64", "package_win6.2-x64", "package_win6.2-x86", "package_tarball64", "package_tarball32", "package_tarballarm", "package_tarballppc64le"], reason=FixedParameter(name="reason", default=""), branch=FixedParameter(name="branch", default=""), repository=FixedParameter(name="repository", default=""), project=FixedParameter(name="project", default="Packaging"), properties=[ ] ) c['schedulers'].append(rc_scheduler)
# ... existing code ... rc_scheduler = ForceScheduler( name="rc build", builderNames=["package_osx10.9-x64", "package_win6.2-x64", "package_win6.2-x86", "package_tarball64", "package_tarball32", "package_tarballarm", "package_tarballppc64le"], reason=FixedParameter(name="reason", default=""), branch=FixedParameter(name="branch", default=""), repository=FixedParameter(name="repository", default=""), # ... rest of the code ...
749aa35a85b6482cfba9dec7d37473a787d73c32
integration-test/1106-merge-ocean-earth.py
integration-test/1106-merge-ocean-earth.py
assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2) # There should be a single (merged) earth feature in this tile assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2) assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2) # OpenStreetMap assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2) assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
Add lowzoom tests for polygon merging
Add lowzoom tests for polygon merging
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
python
## Code Before: assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2) # There should be a single (merged) earth feature in this tile assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2) ## Instruction: Add lowzoom tests for polygon merging ## Code After: assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2) assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2) # OpenStreetMap assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2) assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2)
# ... existing code ... assert_less_than_n_features(5, 11, 11, 'water', {'kind': 'ocean'}, 2) assert_less_than_n_features(5, 8, 11, 'earth', {'kind': 'earth'}, 2) # OpenStreetMap assert_less_than_n_features(9, 167, 186, 'water', {'kind': 'ocean'}, 2) assert_less_than_n_features(9, 170, 186, 'earth', {'kind': 'earth'}, 2) # ... rest of the code ...
0668b59d8ec73e80976928706f96922605fe4f67
tsserver/models.py
tsserver/models.py
from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ id = db.Column(db.Integer, primary_key=True) timestamp = db.Column(db.DateTime) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ timestamp = db.Column(db.DateTime, primary_key=True) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
Remove integer ID in Telemetry model
Remove integer ID in Telemetry model
Python
mit
m4tx/techswarm-server
python
## Code Before: from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ id = db.Column(db.Integer, primary_key=True) timestamp = db.Column(db.DateTime) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure} ## Instruction: Remove integer ID in Telemetry model ## Code After: from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ timestamp = db.Column(db.DateTime, primary_key=True) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
# ... existing code ... (every second or so). """ timestamp = db.Column(db.DateTime, primary_key=True) temperature = db.Column(db.Float) pressure = db.Column(db.Float) # ... rest of the code ...
5ecb6e7c56aa12019c4ca2c47bdd87c49981ad78
setup.py
setup.py
from setuptools import setup, find_packages install_requires=['django>=1.5', 'django-easysettings', 'pytz'] try: import importlib except ImportError: install_requires.append('importlib') setup( name='django-password-policies', version=__import__('password_policies').__version__, description='A Django application to implent password policies.', long_description="""\ django-password-policies is an application for the Django framework that provides unicode-aware password policies on password changes and resets and a mechanism to force password changes. """, author='Tarak Blah', author_email='[email protected]', url='https://github.com/tarak/django-password-policies', include_package_data=True, packages=find_packages(), zip_safe=False, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities' ], install_requires=install_requires, test_suite='tests.runtests', )
from setuptools import setup, find_packages install_requires=['django>=1.5', 'django-easysettings', 'pytz'] try: import importlib except ImportError: install_requires.append('importlib') setup( name='django-password-policies', version=__import__('password_policies').__version__, description='A Django application to implent password policies.', long_description="""\ django-password-policies is an application for the Django framework that provides unicode-aware password policies on password changes and resets and a mechanism to force password changes. """, author='Tarak Blah', author_email='[email protected]', url='https://github.com/tarak/django-password-policies', include_package_data=True, packages=find_packages(), zip_safe=False, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities' ], install_requires=install_requires, test_suite='tests.runtests', )
Add the Language classifiers for 3.x and 2.x
Add the Language classifiers for 3.x and 2.x
Python
bsd-3-clause
tarak/django-password-policies,tarak/django-password-policies
python
## Code Before: from setuptools import setup, find_packages install_requires=['django>=1.5', 'django-easysettings', 'pytz'] try: import importlib except ImportError: install_requires.append('importlib') setup( name='django-password-policies', version=__import__('password_policies').__version__, description='A Django application to implent password policies.', long_description="""\ django-password-policies is an application for the Django framework that provides unicode-aware password policies on password changes and resets and a mechanism to force password changes. """, author='Tarak Blah', author_email='[email protected]', url='https://github.com/tarak/django-password-policies', include_package_data=True, packages=find_packages(), zip_safe=False, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities' ], install_requires=install_requires, test_suite='tests.runtests', ) ## Instruction: Add the Language classifiers for 3.x and 2.x ## Code After: from setuptools import setup, find_packages install_requires=['django>=1.5', 'django-easysettings', 'pytz'] try: import importlib except ImportError: install_requires.append('importlib') setup( name='django-password-policies', version=__import__('password_policies').__version__, description='A Django application to implent password policies.', long_description="""\ django-password-policies is an application for the Django framework that provides unicode-aware password policies on password changes and resets and a mechanism to force password changes. """, author='Tarak Blah', author_email='[email protected]', url='https://github.com/tarak/django-password-policies', include_package_data=True, packages=find_packages(), zip_safe=False, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Utilities' ], install_requires=install_requires, test_suite='tests.runtests', )
# ... existing code ... 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', # ... rest of the code ...
1afc2d2704568e5d587eb2ceb18802a4d9bdf7af
core/cortex-m0/config_core.h
core/cortex-m0/config_core.h
/* Copyright 2014 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __CROS_EC_CONFIG_CORE_H #define __CROS_EC_CONFIG_CORE_H /* Linker binary architecture and format */ #define BFD_ARCH arm #define BFD_FORMAT "elf32-littlearm" /* Emulate the CLZ/CTZ instructions since the CPU core is lacking support */ #define CONFIG_SOFTWARE_CLZ #define CONFIG_SOFTWARE_CTZ #define CONFIG_SOFTWARE_PANIC #define CONFIG_ASSEMBLY_MULA32 #endif /* __CROS_EC_CONFIG_CORE_H */
/* Copyright 2014 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __CROS_EC_CONFIG_CORE_H #define __CROS_EC_CONFIG_CORE_H /* Linker binary architecture and format */ #define BFD_ARCH arm #define BFD_FORMAT "elf32-littlearm" /* * Emulate the CLZ/CTZ instructions since the CPU core is lacking support. * When building with clang, we rely on compiler_rt to provide this support. */ #ifndef __clang__ #define CONFIG_SOFTWARE_CLZ #define CONFIG_SOFTWARE_CTZ #endif /* __clang__ */ #define CONFIG_SOFTWARE_PANIC #define CONFIG_ASSEMBLY_MULA32 #endif /* __CROS_EC_CONFIG_CORE_H */
Use compiler_rt version of clz and ctz
core/cortex-m0: Use compiler_rt version of clz and ctz Use __clzsi2 and __ctzsi2 from compiler_rt instead of our own version. Using the compiler_rt versions result in a slightly smaller image. servo_micro before this change: RO: 18744 bytes in flash remaining RW: 23192 bytes in flash remaining servo_micro after this change: RO: 18808 bytes in flash remaining RW: 23256 bytes in flash remaining BRANCH=none BUG=b:172020503 TEST=CC=clang make BOARD=servo_micro TEST=make buildall Signed-off-by: Tom Hughes <[email protected]> Change-Id: Ibc19a3670127dde211fb20d247c1284d0aec5f61 Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3199739 Reviewed-by: Jack Rosenthal <[email protected]>
C
bsd-3-clause
coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec
c
## Code Before: /* Copyright 2014 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __CROS_EC_CONFIG_CORE_H #define __CROS_EC_CONFIG_CORE_H /* Linker binary architecture and format */ #define BFD_ARCH arm #define BFD_FORMAT "elf32-littlearm" /* Emulate the CLZ/CTZ instructions since the CPU core is lacking support */ #define CONFIG_SOFTWARE_CLZ #define CONFIG_SOFTWARE_CTZ #define CONFIG_SOFTWARE_PANIC #define CONFIG_ASSEMBLY_MULA32 #endif /* __CROS_EC_CONFIG_CORE_H */ ## Instruction: core/cortex-m0: Use compiler_rt version of clz and ctz Use __clzsi2 and __ctzsi2 from compiler_rt instead of our own version. Using the compiler_rt versions result in a slightly smaller image. servo_micro before this change: RO: 18744 bytes in flash remaining RW: 23192 bytes in flash remaining servo_micro after this change: RO: 18808 bytes in flash remaining RW: 23256 bytes in flash remaining BRANCH=none BUG=b:172020503 TEST=CC=clang make BOARD=servo_micro TEST=make buildall Signed-off-by: Tom Hughes <[email protected]> Change-Id: Ibc19a3670127dde211fb20d247c1284d0aec5f61 Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3199739 Reviewed-by: Jack Rosenthal <[email protected]> ## Code After: /* Copyright 2014 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __CROS_EC_CONFIG_CORE_H #define __CROS_EC_CONFIG_CORE_H /* Linker binary architecture and format */ #define BFD_ARCH arm #define BFD_FORMAT "elf32-littlearm" /* * Emulate the CLZ/CTZ instructions since the CPU core is lacking support. * When building with clang, we rely on compiler_rt to provide this support. */ #ifndef __clang__ #define CONFIG_SOFTWARE_CLZ #define CONFIG_SOFTWARE_CTZ #endif /* __clang__ */ #define CONFIG_SOFTWARE_PANIC #define CONFIG_ASSEMBLY_MULA32 #endif /* __CROS_EC_CONFIG_CORE_H */
// ... existing code ... #define BFD_ARCH arm #define BFD_FORMAT "elf32-littlearm" /* * Emulate the CLZ/CTZ instructions since the CPU core is lacking support. * When building with clang, we rely on compiler_rt to provide this support. */ #ifndef __clang__ #define CONFIG_SOFTWARE_CLZ #define CONFIG_SOFTWARE_CTZ #endif /* __clang__ */ #define CONFIG_SOFTWARE_PANIC #define CONFIG_ASSEMBLY_MULA32 // ... rest of the code ...
99d0f754b39bdddf58e44e669d24157227a43107
heliotron/__init__.py
heliotron/__init__.py
from heliotron.bridge import Bridge from heliotron.light import Light import heliotron.presets __all__ = ['Bridge', 'Light', 'presets']
from heliotron.bridge import Bridge from heliotron.light import Light from heliotron import presets __all__ = ['Bridge', 'Light', 'presets']
Change module import to squash a code smell
Change module import to squash a code smell
Python
mit
briancline/heliotron
python
## Code Before: from heliotron.bridge import Bridge from heliotron.light import Light import heliotron.presets __all__ = ['Bridge', 'Light', 'presets'] ## Instruction: Change module import to squash a code smell ## Code After: from heliotron.bridge import Bridge from heliotron.light import Light from heliotron import presets __all__ = ['Bridge', 'Light', 'presets']
# ... existing code ... from heliotron.bridge import Bridge from heliotron.light import Light from heliotron import presets __all__ = ['Bridge', 'Light', 'presets'] # ... rest of the code ...
ff992db4ec52331ceb753cae1e8b73fa90adb334
sw-server/build.gradle.kts
sw-server/build.gradle.kts
plugins { kotlin("jvm") kotlin("plugin.spring") id("org.springframework.boot") version "2.3.3.RELEASE" } apply(plugin = "io.spring.dependency-management") dependencies { implementation(project(":sw-common-model")) implementation(project(":sw-engine")) implementation(project(":sw-bot")) implementation(kotlin("reflect")) // required by Spring 5 implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.4.1") implementation("org.springframework.boot:spring-boot-starter-websocket") implementation("org.springframework.boot:spring-boot-starter-security") // required by spring security when using websockets implementation("org.springframework.security:spring-security-messaging") implementation("com.fasterxml.jackson.module:jackson-module-kotlin") implementation("ch.qos.logback:logback-classic:1.1.8") testImplementation(kotlin("test")) testImplementation(kotlin("test-junit")) testImplementation(project(":sw-client")) testImplementation("org.springframework.boot:spring-boot-starter-test") testImplementation("com.fasterxml.jackson.module:jackson-module-kotlin") } tasks.processResources { // package the frontend app within the jar as static val frontendBuildDir = project(":sw-ui").buildDir val frontendDist = frontendBuildDir.toPath().resolve("distributions") from(frontendDist) { include("**/*") into("static") } } // make sure we build the frontend before creating the jar tasks.processResources.get().dependsOn(":sw-ui:assemble")
plugins { kotlin("jvm") kotlin("plugin.spring") id("org.springframework.boot") version "2.4.0" } apply(plugin = "io.spring.dependency-management") dependencies { implementation(project(":sw-common-model")) implementation(project(":sw-engine")) implementation(project(":sw-bot")) implementation(kotlin("reflect")) // required by Spring 5 implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.4.1") implementation("org.springframework.boot:spring-boot-starter-websocket") implementation("org.springframework.boot:spring-boot-starter-security") // required by spring security when using websockets implementation("org.springframework.security:spring-security-messaging") implementation("com.fasterxml.jackson.module:jackson-module-kotlin") implementation("ch.qos.logback:logback-classic:1.1.8") testImplementation(kotlin("test")) testImplementation(kotlin("test-junit")) testImplementation(project(":sw-client")) testImplementation("org.springframework.boot:spring-boot-starter-test") testImplementation("com.fasterxml.jackson.module:jackson-module-kotlin") } tasks.processResources { // package the frontend app within the jar as static val frontendBuildDir = project(":sw-ui").buildDir val frontendDist = frontendBuildDir.toPath().resolve("distributions") from(frontendDist) { include("**/*") into("static") } } // make sure we build the frontend before creating the jar tasks.processResources.get().dependsOn(":sw-ui:assemble")
Upgrade to Spring Boot 2.4.0
Upgrade to Spring Boot 2.4.0
Kotlin
mit
luxons/seven-wonders,luxons/seven-wonders,luxons/seven-wonders
kotlin
## Code Before: plugins { kotlin("jvm") kotlin("plugin.spring") id("org.springframework.boot") version "2.3.3.RELEASE" } apply(plugin = "io.spring.dependency-management") dependencies { implementation(project(":sw-common-model")) implementation(project(":sw-engine")) implementation(project(":sw-bot")) implementation(kotlin("reflect")) // required by Spring 5 implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.4.1") implementation("org.springframework.boot:spring-boot-starter-websocket") implementation("org.springframework.boot:spring-boot-starter-security") // required by spring security when using websockets implementation("org.springframework.security:spring-security-messaging") implementation("com.fasterxml.jackson.module:jackson-module-kotlin") implementation("ch.qos.logback:logback-classic:1.1.8") testImplementation(kotlin("test")) testImplementation(kotlin("test-junit")) testImplementation(project(":sw-client")) testImplementation("org.springframework.boot:spring-boot-starter-test") testImplementation("com.fasterxml.jackson.module:jackson-module-kotlin") } tasks.processResources { // package the frontend app within the jar as static val frontendBuildDir = project(":sw-ui").buildDir val frontendDist = frontendBuildDir.toPath().resolve("distributions") from(frontendDist) { include("**/*") into("static") } } // make sure we build the frontend before creating the jar tasks.processResources.get().dependsOn(":sw-ui:assemble") ## Instruction: Upgrade to Spring Boot 2.4.0 ## Code After: plugins { kotlin("jvm") kotlin("plugin.spring") id("org.springframework.boot") version "2.4.0" } apply(plugin = "io.spring.dependency-management") dependencies { implementation(project(":sw-common-model")) implementation(project(":sw-engine")) implementation(project(":sw-bot")) implementation(kotlin("reflect")) // required by Spring 5 implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.4.1") implementation("org.springframework.boot:spring-boot-starter-websocket") implementation("org.springframework.boot:spring-boot-starter-security") // required by spring security when using websockets implementation("org.springframework.security:spring-security-messaging") implementation("com.fasterxml.jackson.module:jackson-module-kotlin") implementation("ch.qos.logback:logback-classic:1.1.8") testImplementation(kotlin("test")) testImplementation(kotlin("test-junit")) testImplementation(project(":sw-client")) testImplementation("org.springframework.boot:spring-boot-starter-test") testImplementation("com.fasterxml.jackson.module:jackson-module-kotlin") } tasks.processResources { // package the frontend app within the jar as static val frontendBuildDir = project(":sw-ui").buildDir val frontendDist = frontendBuildDir.toPath().resolve("distributions") from(frontendDist) { include("**/*") into("static") } } // make sure we build the frontend before creating the jar tasks.processResources.get().dependsOn(":sw-ui:assemble")
// ... existing code ... plugins { kotlin("jvm") kotlin("plugin.spring") id("org.springframework.boot") version "2.4.0" } apply(plugin = "io.spring.dependency-management") // ... rest of the code ...
5b8b210a73282f6176883f3fab1dd0b2801b3f34
wsgi/app.py
wsgi/app.py
try: import newrelic.agent except ImportError: newrelic = False if newrelic: newrelic_ini = config('NEWRELIC_PYTHON_INI_FILE', default='') if newrelic_ini: newrelic.agent.initialize(newrelic_ini) else: newrelic = False import os from bedrock.base.config_manager import config IS_HTTPS = os.environ.get('HTTPS', '').strip() == 'on' os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bedrock.settings') # must be imported after env var is set above. from django.core.handlers.wsgi import WSGIRequest from django.core.wsgi import get_wsgi_application from whitenoise.django import DjangoWhiteNoise from raven.contrib.django.raven_compat.middleware.wsgi import Sentry class WSGIHTTPSRequest(WSGIRequest): def _get_scheme(self): if IS_HTTPS: return 'https' return super(WSGIHTTPSRequest, self)._get_scheme() application = get_wsgi_application() application.request_class = WSGIHTTPSRequest application = DjangoWhiteNoise(application) application = Sentry(application) if newrelic: application = newrelic.agent.wsgi_application()(application)
try: import newrelic.agent except ImportError: newrelic = False else: newrelic.agent.initialize() import os from bedrock.base.config_manager import config IS_HTTPS = os.environ.get('HTTPS', '').strip() == 'on' os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bedrock.settings') # must be imported after env var is set above. from django.core.handlers.wsgi import WSGIRequest from django.core.wsgi import get_wsgi_application from whitenoise.django import DjangoWhiteNoise from raven.contrib.django.raven_compat.middleware.wsgi import Sentry class WSGIHTTPSRequest(WSGIRequest): def _get_scheme(self): if IS_HTTPS: return 'https' return super(WSGIHTTPSRequest, self)._get_scheme() application = get_wsgi_application() application.request_class = WSGIHTTPSRequest application = DjangoWhiteNoise(application) application = Sentry(application) if newrelic: application = newrelic.agent.wsgi_application()(application)
Remove unused ability to use custom newrelic.ini
Remove unused ability to use custom newrelic.ini
Python
mpl-2.0
flodolo/bedrock,craigcook/bedrock,hoosteeno/bedrock,sylvestre/bedrock,craigcook/bedrock,pascalchevrel/bedrock,kyoshino/bedrock,kyoshino/bedrock,sgarrity/bedrock,ericawright/bedrock,ericawright/bedrock,MichaelKohler/bedrock,mozilla/bedrock,alexgibson/bedrock,alexgibson/bedrock,hoosteeno/bedrock,ericawright/bedrock,sgarrity/bedrock,flodolo/bedrock,flodolo/bedrock,mozilla/bedrock,pascalchevrel/bedrock,sgarrity/bedrock,craigcook/bedrock,sylvestre/bedrock,MichaelKohler/bedrock,hoosteeno/bedrock,kyoshino/bedrock,sylvestre/bedrock,MichaelKohler/bedrock,alexgibson/bedrock,ericawright/bedrock,craigcook/bedrock,mozilla/bedrock,alexgibson/bedrock,kyoshino/bedrock,sgarrity/bedrock,flodolo/bedrock,MichaelKohler/bedrock,hoosteeno/bedrock,mozilla/bedrock,sylvestre/bedrock,pascalchevrel/bedrock,pascalchevrel/bedrock
python
## Code Before: try: import newrelic.agent except ImportError: newrelic = False if newrelic: newrelic_ini = config('NEWRELIC_PYTHON_INI_FILE', default='') if newrelic_ini: newrelic.agent.initialize(newrelic_ini) else: newrelic = False import os from bedrock.base.config_manager import config IS_HTTPS = os.environ.get('HTTPS', '').strip() == 'on' os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bedrock.settings') # must be imported after env var is set above. from django.core.handlers.wsgi import WSGIRequest from django.core.wsgi import get_wsgi_application from whitenoise.django import DjangoWhiteNoise from raven.contrib.django.raven_compat.middleware.wsgi import Sentry class WSGIHTTPSRequest(WSGIRequest): def _get_scheme(self): if IS_HTTPS: return 'https' return super(WSGIHTTPSRequest, self)._get_scheme() application = get_wsgi_application() application.request_class = WSGIHTTPSRequest application = DjangoWhiteNoise(application) application = Sentry(application) if newrelic: application = newrelic.agent.wsgi_application()(application) ## Instruction: Remove unused ability to use custom newrelic.ini ## Code After: try: import newrelic.agent except ImportError: newrelic = False else: newrelic.agent.initialize() import os from bedrock.base.config_manager import config IS_HTTPS = os.environ.get('HTTPS', '').strip() == 'on' os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bedrock.settings') # must be imported after env var is set above. from django.core.handlers.wsgi import WSGIRequest from django.core.wsgi import get_wsgi_application from whitenoise.django import DjangoWhiteNoise from raven.contrib.django.raven_compat.middleware.wsgi import Sentry class WSGIHTTPSRequest(WSGIRequest): def _get_scheme(self): if IS_HTTPS: return 'https' return super(WSGIHTTPSRequest, self)._get_scheme() application = get_wsgi_application() application.request_class = WSGIHTTPSRequest application = DjangoWhiteNoise(application) application = Sentry(application) if newrelic: application = newrelic.agent.wsgi_application()(application)
... import newrelic.agent except ImportError: newrelic = False else: newrelic.agent.initialize() import os ...
6c6f6ec6c5a895f083ff8c9b9a0d76791bb13ce9
app/eve_api/tasks/static.py
app/eve_api/tasks/static.py
from celery.decorators import task from eve_proxy.models import CachedDocument from eve_api.utils import basic_xml_parse_doc from eve_api.models import EVESkill, EVESkillGroup @task() def import_eve_skills(): """ Imports the skill tree and groups """ char_doc = CachedDocument.objects.api_query('/eve/SkillTree.xml.aspx') d = basic_xml_parse_doc(char_doc)['eveapi'] if 'error' in d: return values = d['result'] for group in values['skillGroups']: gobj, created = EVESkillGroup.objects.get_or_create(id=group['groupID']) if created: gobj.name = group['groupName'] gobj.save() for skill in group['skills']: skillobj, created = EVESkill.objects.get_or_create(id=skill['typeID']) if created or not skillobj.name or not skillobj.group: skillobj.name = skill['typeName'] skillobj.group = gobj skillobj.save()
from celery.decorators import task from eve_proxy.models import CachedDocument from eve_api.utils import basic_xml_parse_doc from eve_api.models import EVESkill, EVESkillGroup @task() def import_eve_skills(): """ Imports the skill tree and groups """ char_doc = CachedDocument.objects.api_query('/eve/SkillTree.xml.aspx') d = basic_xml_parse_doc(char_doc)['eveapi'] if 'error' in d: return values = d['result'] for group in values['skillGroups']: gobj, created = EVESkillGroup.objects.get_or_create(id=group['groupID']) if created or not gobj.name or not gobj.name == group['groupName']: gobj.name = group['groupName'] gobj.save() for skill in group['skills']: skillobj, created = EVESkill.objects.get_or_create(id=skill['typeID']) if created or not skillobj.name or not skillobj.group or not skillobj.name == skill['typeName']: skillobj.name = skill['typeName'] skillobj.group = gobj skillobj.save()
Support if skill group/types are changed
Support if skill group/types are changed
Python
bsd-3-clause
nikdoof/test-auth
python
## Code Before: from celery.decorators import task from eve_proxy.models import CachedDocument from eve_api.utils import basic_xml_parse_doc from eve_api.models import EVESkill, EVESkillGroup @task() def import_eve_skills(): """ Imports the skill tree and groups """ char_doc = CachedDocument.objects.api_query('/eve/SkillTree.xml.aspx') d = basic_xml_parse_doc(char_doc)['eveapi'] if 'error' in d: return values = d['result'] for group in values['skillGroups']: gobj, created = EVESkillGroup.objects.get_or_create(id=group['groupID']) if created: gobj.name = group['groupName'] gobj.save() for skill in group['skills']: skillobj, created = EVESkill.objects.get_or_create(id=skill['typeID']) if created or not skillobj.name or not skillobj.group: skillobj.name = skill['typeName'] skillobj.group = gobj skillobj.save() ## Instruction: Support if skill group/types are changed ## Code After: from celery.decorators import task from eve_proxy.models import CachedDocument from eve_api.utils import basic_xml_parse_doc from eve_api.models import EVESkill, EVESkillGroup @task() def import_eve_skills(): """ Imports the skill tree and groups """ char_doc = CachedDocument.objects.api_query('/eve/SkillTree.xml.aspx') d = basic_xml_parse_doc(char_doc)['eveapi'] if 'error' in d: return values = d['result'] for group in values['skillGroups']: gobj, created = EVESkillGroup.objects.get_or_create(id=group['groupID']) if created or not gobj.name or not gobj.name == group['groupName']: gobj.name = group['groupName'] gobj.save() for skill in group['skills']: skillobj, created = EVESkill.objects.get_or_create(id=skill['typeID']) if created or not skillobj.name or not skillobj.group or not skillobj.name == skill['typeName']: skillobj.name = skill['typeName'] skillobj.group = gobj skillobj.save()
# ... existing code ... for group in values['skillGroups']: gobj, created = EVESkillGroup.objects.get_or_create(id=group['groupID']) if created or not gobj.name or not gobj.name == group['groupName']: gobj.name = group['groupName'] gobj.save() for skill in group['skills']: skillobj, created = EVESkill.objects.get_or_create(id=skill['typeID']) if created or not skillobj.name or not skillobj.group or not skillobj.name == skill['typeName']: skillobj.name = skill['typeName'] skillobj.group = gobj skillobj.save() # ... rest of the code ...
056fb9fe70d38a86bcb3dfc3c2c71ebcdc4bafd9
documentation/src/test/java/example/DisplayNameDemo.java
documentation/src/test/java/example/DisplayNameDemo.java
/* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package example; // tag::user_guide[] import org.junit.gen5.api.DisplayName; import org.junit.gen5.api.Test; @DisplayName("A special test case") class DisplayNameDemo { @Test @DisplayName("A nice name, isn't it?") void testWithANiceName() { } } // end::user_guide[]
/* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package example; // tag::user_guide[] import org.junit.gen5.api.DisplayName; import org.junit.gen5.api.Test; @DisplayName("A special test case") class DisplayNameDemo { @Test @DisplayName("Custom test name containing spaces") void testWithDisplayNameContainingSpaces() { } @Test @DisplayName("╯°□°)╯") void testWithDisplayNameContainingSpecialCharacters() { } @Test @DisplayName("😱") void testWithDisplayNameContainingEmoji() { } } // end::user_guide[]
Include emoji and special characters in @DisplayName demo
Include emoji and special characters in @DisplayName demo
Java
epl-1.0
sbrannen/junit-lambda,junit-team/junit-lambda,marcphilipp/junit5,marcphilipp/junit-lambda
java
## Code Before: /* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package example; // tag::user_guide[] import org.junit.gen5.api.DisplayName; import org.junit.gen5.api.Test; @DisplayName("A special test case") class DisplayNameDemo { @Test @DisplayName("A nice name, isn't it?") void testWithANiceName() { } } // end::user_guide[] ## Instruction: Include emoji and special characters in @DisplayName demo ## Code After: /* * Copyright 2015-2016 the original author or authors. * * All rights reserved. This program and the accompanying materials are * made available under the terms of the Eclipse Public License v1.0 which * accompanies this distribution and is available at * * http://www.eclipse.org/legal/epl-v10.html */ package example; // tag::user_guide[] import org.junit.gen5.api.DisplayName; import org.junit.gen5.api.Test; @DisplayName("A special test case") class DisplayNameDemo { @Test @DisplayName("Custom test name containing spaces") void testWithDisplayNameContainingSpaces() { } @Test @DisplayName("╯°□°)╯") void testWithDisplayNameContainingSpecialCharacters() { } @Test @DisplayName("😱") void testWithDisplayNameContainingEmoji() { } } // end::user_guide[]
... class DisplayNameDemo { @Test @DisplayName("Custom test name containing spaces") void testWithDisplayNameContainingSpaces() { } @Test @DisplayName("╯°□°)╯") void testWithDisplayNameContainingSpecialCharacters() { } @Test @DisplayName("😱") void testWithDisplayNameContainingEmoji() { } } // end::user_guide[] ...
61bbd4e8fc0712fe56614481173eb86d409eb8d7
tests/test_linked_list.py
tests/test_linked_list.py
from unittest import TestCase from pystructures.linked_lists import LinkedList, Node class TestNode(TestCase): def test_value(self): """ A simple test to check the Node's value """ node = Node(10) self.assertEqual(10, node.value) def test_improper_node(self): """ A test to check if an invalid data type is set as a node's next""" node = Node(10) with self.assertRaises(ValueError): node.next = "Hello" class TestLinkedList(TestCase): def test_insert(self): """ A simple test to check if insertion works as expected in a singly linked list """ l = LinkedList() results = [l.insert(val) for val in xrange(10, 100, 10)] self.assertEqual(len(set(results)), 1) self.assertTrue(results[0], msg="Testing for successful insertion...") self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...")
from builtins import range from unittest import TestCase from pystructures.linked_lists import LinkedList, Node class TestNode(TestCase): def test_value(self): """ A simple test to check the Node's value """ node = Node(10) self.assertEqual(10, node.value) def test_improper_node(self): """ A test to check if an invalid data type is set as a node's next""" node = Node(10) with self.assertRaises(ValueError): node.next = "Hello" class TestLinkedList(TestCase): def test_insert(self): """ A simple test to check if insertion works as expected in a singly linked list """ l = LinkedList() results = [l.insert(val) for val in range(10, 100, 10)] self.assertEqual(len(set(results)), 1) self.assertTrue(results[0], msg="Testing for successful insertion...") self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...")
Fix range issue with travis
Fix range issue with travis
Python
mit
apranav19/pystructures
python
## Code Before: from unittest import TestCase from pystructures.linked_lists import LinkedList, Node class TestNode(TestCase): def test_value(self): """ A simple test to check the Node's value """ node = Node(10) self.assertEqual(10, node.value) def test_improper_node(self): """ A test to check if an invalid data type is set as a node's next""" node = Node(10) with self.assertRaises(ValueError): node.next = "Hello" class TestLinkedList(TestCase): def test_insert(self): """ A simple test to check if insertion works as expected in a singly linked list """ l = LinkedList() results = [l.insert(val) for val in xrange(10, 100, 10)] self.assertEqual(len(set(results)), 1) self.assertTrue(results[0], msg="Testing for successful insertion...") self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...") ## Instruction: Fix range issue with travis ## Code After: from builtins import range from unittest import TestCase from pystructures.linked_lists import LinkedList, Node class TestNode(TestCase): def test_value(self): """ A simple test to check the Node's value """ node = Node(10) self.assertEqual(10, node.value) def test_improper_node(self): """ A test to check if an invalid data type is set as a node's next""" node = Node(10) with self.assertRaises(ValueError): node.next = "Hello" class TestLinkedList(TestCase): def test_insert(self): """ A simple test to check if insertion works as expected in a singly linked list """ l = LinkedList() results = [l.insert(val) for val in range(10, 100, 10)] self.assertEqual(len(set(results)), 1) self.assertTrue(results[0], msg="Testing for successful insertion...") self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...")
// ... existing code ... from builtins import range from unittest import TestCase from pystructures.linked_lists import LinkedList, Node // ... modified code ... def test_insert(self): """ A simple test to check if insertion works as expected in a singly linked list """ l = LinkedList() results = [l.insert(val) for val in range(10, 100, 10)] self.assertEqual(len(set(results)), 1) self.assertTrue(results[0], msg="Testing for successful insertion...") self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...") // ... rest of the code ...
40fe604adc38095a65b2fd9168badb50daa65b14
thefuck/rules/git_pull.py
thefuck/rules/git_pull.py
def match(command, settings): return ('git' in command.script and 'pull' in command.script and 'set-upstream' in command.stderr) def get_new_command(command, settings): line = command.stderr.split('\n')[-3].strip() branch = line.split(' ')[-1] set_upstream = line.replace('<remote>', 'origin')\ .replace('<branch>', branch) return u'{} && {}'.format(set_upstream, command.script)
from thefuck import shells def match(command, settings): return ('git' in command.script and 'pull' in command.script and 'set-upstream' in command.stderr) def get_new_command(command, settings): line = command.stderr.split('\n')[-3].strip() branch = line.split(' ')[-1] set_upstream = line.replace('<remote>', 'origin')\ .replace('<branch>', branch) return shells.and_(set_upstream, command.script)
Replace use of '&&' by shells.and_
Replace use of '&&' by shells.and_
Python
mit
scorphus/thefuck,subajat1/thefuck,bigplus/thefuck,beni55/thefuck,barneyElDinosaurio/thefuck,roth1002/thefuck,bigplus/thefuck,nvbn/thefuck,mcarton/thefuck,sekaiamber/thefuck,mlk/thefuck,gogobebe2/thefuck,petr-tichy/thefuck,SimenB/thefuck,hxddh/thefuck,PLNech/thefuck,LawrenceHan/thefuck,gaurav9991/thefuck,redreamality/thefuck,vanita5/thefuck,hxddh/thefuck,BertieJim/thefuck,PLNech/thefuck,barneyElDinosaurio/thefuck,princeofdarkness76/thefuck,qingying5810/thefuck,beni55/thefuck,Clpsplug/thefuck,artiya4u/thefuck,ostree/thefuck,levythu/thefuck,vanita5/thefuck,princeofdarkness76/thefuck,roth1002/thefuck,lawrencebenson/thefuck,MJerty/thefuck,LawrenceHan/thefuck,scorphus/thefuck,bugaevc/thefuck,mcarton/thefuck,mbbill/thefuck,suxinde2009/thefuck,AntonChankin/thefuck,levythu/thefuck,MJerty/thefuck,manashmndl/thefuck,mlk/thefuck,subajat1/thefuck,AntonChankin/thefuck,zhangzhishan/thefuck,NguyenHoaiNam/thefuck,thinkerchan/thefuck,nvbn/thefuck,lawrencebenson/thefuck,thinkerchan/thefuck,Clpsplug/thefuck,ostree/thefuck,manashmndl/thefuck,BertieJim/thefuck,Aeron/thefuck,thesoulkiller/thefuck,SimenB/thefuck,redreamality/thefuck,thesoulkiller/thefuck
python
## Code Before: def match(command, settings): return ('git' in command.script and 'pull' in command.script and 'set-upstream' in command.stderr) def get_new_command(command, settings): line = command.stderr.split('\n')[-3].strip() branch = line.split(' ')[-1] set_upstream = line.replace('<remote>', 'origin')\ .replace('<branch>', branch) return u'{} && {}'.format(set_upstream, command.script) ## Instruction: Replace use of '&&' by shells.and_ ## Code After: from thefuck import shells def match(command, settings): return ('git' in command.script and 'pull' in command.script and 'set-upstream' in command.stderr) def get_new_command(command, settings): line = command.stderr.split('\n')[-3].strip() branch = line.split(' ')[-1] set_upstream = line.replace('<remote>', 'origin')\ .replace('<branch>', branch) return shells.and_(set_upstream, command.script)
// ... existing code ... from thefuck import shells def match(command, settings): return ('git' in command.script and 'pull' in command.script // ... modified code ... branch = line.split(' ')[-1] set_upstream = line.replace('<remote>', 'origin')\ .replace('<branch>', branch) return shells.and_(set_upstream, command.script) // ... rest of the code ...
13b4d336f5556be0210b703aaee05e3b5224fb05
tests/GIR/test_001_connection.py
tests/GIR/test_001_connection.py
import sys import struct import unittest from test_000_config import TestConfig from gi.repository import Midgard, GObject class TestConnection(Midgard.Connection): def __init__(self): Midgard.init() Midgard.Connection.__init__(self) @staticmethod def openConnection(): config = TestConfig() mgd = Midgard.Connection() mgd.open_config(config) return mgd class TestMethods(unittest.TestCase): def testOpenConfig(self): config = TestConfig() mgd = TestConnection() self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") self.assertTrue(mgd.open_config(config)) self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") def testInheritance(self): mgd = TestConnection() self.assertIsInstance(mgd, GObject.GObject) if __name__ == "__main__": unittest.main()
import sys import struct import unittest from test_000_config import TestConfig from gi.repository import Midgard, GObject class TestConnection(Midgard.Connection): def __init__(self): Midgard.init() Midgard.Connection.__init__(self) @staticmethod def openConnection(): config = TestConfig() mgd = Midgard.Connection() if mgd.open_config(config) is True: return mgd print mgd.get_error_string() return None class TestMethods(unittest.TestCase): def testOpenConfig(self): config = TestConfig() mgd = TestConnection() self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") self.assertTrue(mgd.open_config(config)) self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") def testInheritance(self): mgd = TestConnection() self.assertIsInstance(mgd, GObject.GObject) if __name__ == "__main__": unittest.main()
Return Midgard.Connection if one is opened. None otherwise
Return Midgard.Connection if one is opened. None otherwise
Python
lgpl-2.1
midgardproject/midgard-core,piotras/midgard-core,midgardproject/midgard-core,piotras/midgard-core,midgardproject/midgard-core,piotras/midgard-core,piotras/midgard-core,midgardproject/midgard-core
python
## Code Before: import sys import struct import unittest from test_000_config import TestConfig from gi.repository import Midgard, GObject class TestConnection(Midgard.Connection): def __init__(self): Midgard.init() Midgard.Connection.__init__(self) @staticmethod def openConnection(): config = TestConfig() mgd = Midgard.Connection() mgd.open_config(config) return mgd class TestMethods(unittest.TestCase): def testOpenConfig(self): config = TestConfig() mgd = TestConnection() self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") self.assertTrue(mgd.open_config(config)) self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") def testInheritance(self): mgd = TestConnection() self.assertIsInstance(mgd, GObject.GObject) if __name__ == "__main__": unittest.main() ## Instruction: Return Midgard.Connection if one is opened. None otherwise ## Code After: import sys import struct import unittest from test_000_config import TestConfig from gi.repository import Midgard, GObject class TestConnection(Midgard.Connection): def __init__(self): Midgard.init() Midgard.Connection.__init__(self) @staticmethod def openConnection(): config = TestConfig() mgd = Midgard.Connection() if mgd.open_config(config) is True: return mgd print mgd.get_error_string() return None class TestMethods(unittest.TestCase): def testOpenConfig(self): config = TestConfig() mgd = TestConnection() self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") self.assertTrue(mgd.open_config(config)) self.assertEqual(mgd.get_error_string(), "MGD_ERR_OK") def testInheritance(self): mgd = TestConnection() self.assertIsInstance(mgd, GObject.GObject) if __name__ == "__main__": unittest.main()
# ... existing code ... def openConnection(): config = TestConfig() mgd = Midgard.Connection() if mgd.open_config(config) is True: return mgd print mgd.get_error_string() return None class TestMethods(unittest.TestCase): def testOpenConfig(self): # ... rest of the code ...
04fa3a9fd61cc83c23ddd59ea474bd45cd2a1e8c
tests/__init__.py
tests/__init__.py
from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs")))
from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath
Make namespace packages work for tests in windows
Make namespace packages work for tests in windows
Python
mit
rkhwaja/fs.onedrivefs
python
## Code Before: from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs"))) ## Instruction: Make namespace packages work for tests in windows ## Code After: from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath
... import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath ...
161a3f4099aefbf6cbaeb325de27678c6297bd7c
src/pl/src/edu/rice/bayou/experiments/low_level_sketches/DAPICallLowLevel.java
src/pl/src/edu/rice/bayou/experiments/low_level_sketches/DAPICallLowLevel.java
package edu.rice.bayou.experiments.low_level_sketches; public class DAPICallLowLevel extends DASTNodeLowLevel { String node = "DAPICall"; String _call; @Override public String getLowLevelSketch() { return node + delim + _call + delim + STOP; } }
package edu.rice.bayou.experiments.low_level_sketches; public class DAPICallLowLevel extends DASTNodeLowLevel { String node = "DAPICall"; String _call; @Override public String getLowLevelSketch() { String[] tokens = _call.split("[,()]"); return node + delim + String.join(delim, tokens) + delim + STOP; } }
Fix in low_level_sketches experiment data reader
Fix in low_level_sketches experiment data reader
Java
apache-2.0
capergroup/bayou,capergroup/bayou,capergroup/bayou
java
## Code Before: package edu.rice.bayou.experiments.low_level_sketches; public class DAPICallLowLevel extends DASTNodeLowLevel { String node = "DAPICall"; String _call; @Override public String getLowLevelSketch() { return node + delim + _call + delim + STOP; } } ## Instruction: Fix in low_level_sketches experiment data reader ## Code After: package edu.rice.bayou.experiments.low_level_sketches; public class DAPICallLowLevel extends DASTNodeLowLevel { String node = "DAPICall"; String _call; @Override public String getLowLevelSketch() { String[] tokens = _call.split("[,()]"); return node + delim + String.join(delim, tokens) + delim + STOP; } }
... @Override public String getLowLevelSketch() { String[] tokens = _call.split("[,()]"); return node + delim + String.join(delim, tokens) + delim + STOP; } } ...
b89982f7b66b46f4338ff2758219d7419e36d6ba
lms/djangoapps/api_manager/management/commands/migrate_orgdata.py
lms/djangoapps/api_manager/management/commands/migrate_orgdata.py
import json from django.contrib.auth.models import Group from django.core.management.base import BaseCommand from api_manager.models import GroupProfile, Organization class Command(BaseCommand): """ Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model """ def handle(self, *args, **options): org_groups = GroupProfile.objects.filter(group_type='organization') for org in org_groups: data = json.loads(org.data) migrated_org = Organization.objects.create( name=data['name'], display_name=data['display_name'], contact_name=data['contact_name'], contact_email=data['contact_email'], contact_phone=data['contact_phone'] ) group = Group.objects.get(groupprofile=org.id) users = group.user_set.all() for user in users: migrated_org.users.add(user) linked_groups = group.grouprelationship.get_linked_group_relationships() for linked_group in linked_groups: if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component actual_group = Group.objects.get(id=linked_group.to_group_relationship_id) migrated_org.groups.add(actual_group)
import json from django.contrib.auth.models import Group from django.core.management.base import BaseCommand from api_manager.models import GroupProfile, Organization class Command(BaseCommand): """ Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model """ def handle(self, *args, **options): org_groups = GroupProfile.objects.filter(group_type='organization') for org in org_groups: data = json.loads(org.data) name = org.name display_name = data.get('display_name', name) contact_name = data.get('contact_name', None) contact_email = data.get('email', None) if contact_email is None: contact_email = data.get('contact_email', None) contact_phone = data.get('phone', None) if contact_phone is None: contact_phone = data.get('contact_phone', None) migrated_org = Organization.objects.create( name=name, display_name=display_name, contact_name=contact_name, contact_email=contact_email, contact_phone=contact_phone ) group = Group.objects.get(groupprofile=org.id) users = group.user_set.all() for user in users: migrated_org.users.add(user) linked_groups = group.grouprelationship.get_linked_group_relationships() for linked_group in linked_groups: if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component actual_group = Group.objects.get(id=linked_group.to_group_relationship_id) migrated_org.groups.add(actual_group)
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
Python
agpl-3.0
edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform
python
## Code Before: import json from django.contrib.auth.models import Group from django.core.management.base import BaseCommand from api_manager.models import GroupProfile, Organization class Command(BaseCommand): """ Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model """ def handle(self, *args, **options): org_groups = GroupProfile.objects.filter(group_type='organization') for org in org_groups: data = json.loads(org.data) migrated_org = Organization.objects.create( name=data['name'], display_name=data['display_name'], contact_name=data['contact_name'], contact_email=data['contact_email'], contact_phone=data['contact_phone'] ) group = Group.objects.get(groupprofile=org.id) users = group.user_set.all() for user in users: migrated_org.users.add(user) linked_groups = group.grouprelationship.get_linked_group_relationships() for linked_group in linked_groups: if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component actual_group = Group.objects.get(id=linked_group.to_group_relationship_id) migrated_org.groups.add(actual_group) ## Instruction: Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present ## Code After: import json from django.contrib.auth.models import Group from django.core.management.base import BaseCommand from api_manager.models import GroupProfile, Organization class Command(BaseCommand): """ Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model """ def handle(self, *args, **options): org_groups = GroupProfile.objects.filter(group_type='organization') for org in org_groups: data = json.loads(org.data) name = org.name display_name = data.get('display_name', name) contact_name = data.get('contact_name', None) contact_email = data.get('email', None) if contact_email is None: contact_email = data.get('contact_email', None) contact_phone = data.get('phone', None) if contact_phone is None: contact_phone = data.get('contact_phone', None) migrated_org = Organization.objects.create( name=name, display_name=display_name, contact_name=contact_name, contact_email=contact_email, contact_phone=contact_phone ) group = Group.objects.get(groupprofile=org.id) users = group.user_set.all() for user in users: migrated_org.users.add(user) linked_groups = group.grouprelationship.get_linked_group_relationships() for linked_group in linked_groups: if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component actual_group = Group.objects.get(id=linked_group.to_group_relationship_id) migrated_org.groups.add(actual_group)
# ... existing code ... for org in org_groups: data = json.loads(org.data) name = org.name display_name = data.get('display_name', name) contact_name = data.get('contact_name', None) contact_email = data.get('email', None) if contact_email is None: contact_email = data.get('contact_email', None) contact_phone = data.get('phone', None) if contact_phone is None: contact_phone = data.get('contact_phone', None) migrated_org = Organization.objects.create( name=name, display_name=display_name, contact_name=contact_name, contact_email=contact_email, contact_phone=contact_phone ) group = Group.objects.get(groupprofile=org.id) users = group.user_set.all() # ... rest of the code ...
b4bdd8e20b82f8016030037712094f257af9221f
cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py
cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py
from sqlalchemy import Column from sqlalchemy import MetaData, String, Table def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = Column('provider_location', String(255)) snapshots.create_column(provider_location) def downgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = snapshots.columns.provider_location provider_location.drop()
from sqlalchemy import Column from sqlalchemy import MetaData, String, Table def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = Column('provider_location', String(255)) snapshots.create_column(provider_location) snapshots.update().values(provider_location=None).execute() def downgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = snapshots.columns.provider_location snapshots.drop_column(provider_location)
Fix provider_location column add for PSQL
Fix provider_location column add for PSQL. Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5) didn't work properly for Postgres; this patch corrects the upgrade by ensuring the execute is performed and the value is initialized to None. Since we haven't released a milestone with this migration in the code, it should be safe to just fix it here and submit. Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35
Python
apache-2.0
nexusriot/cinder,github-borat/cinder,mahak/cinder,CloudServer/cinder,eharney/cinder,spring-week-topos/cinder-week,blueboxgroup/cinder,potsmaster/cinder,julianwang/cinder,github-borat/cinder,Datera/cinder,j-griffith/cinder,cloudbau/cinder,cloudbase/cinder,redhat-openstack/cinder,NeCTAR-RC/cinder,rakeshmi/cinder,abusse/cinder,winndows/cinder,abusse/cinder,dims/cinder,rickerc/cinder_audit,petrutlucian94/cinder,duhzecca/cinder,julianwang/cinder,ntt-sic/cinder,maelnor/cinder,apporc/cinder,nikesh-mahalka/cinder,tlakshman26/cinder-new-branch,phenoxim/cinder,nikesh-mahalka/cinder,JioCloud/cinder,rickerc/cinder_audit,winndows/cinder,Akrog/cinder,Paul-Ezell/cinder-1,Hybrid-Cloud/cinder,Paul-Ezell/cinder-1,alex8866/cinder,tlakshman26/cinder-https-changes,tlakshman26/cinder-bug-fix-volume-conversion-full,inkerra/cinder,dims/cinder,hguemar/cinder,scottdangelo/RemoveVolumeMangerLocks,j-griffith/cinder,leilihh/cinder,Thingee/cinder,takeshineshiro/cinder,Datera/cinder,bswartz/cinder,nexusriot/cinder,phenoxim/cinder,NetApp/cinder,tlakshman26/cinder-new-branch,openstack/cinder,Thingee/cinder,bswartz/cinder,mahak/cinder,hguemar/cinder,Thingee/cinder,ntt-sic/cinder,leilihh/cinder,saeki-masaki/cinder,blueboxgroup/cinder,ge0rgi/cinder,alex8866/cinder,scality/cinder,spring-week-topos/cinder-week,maelnor/cinder,petrutlucian94/cinder,potsmaster/cinder,Akrog/cinder,scottdangelo/RemoveVolumeMangerLocks,rakeshmi/cinder,Nexenta/cinder,redhat-openstack/cinder,duhzecca/cinder,Nexenta/cinder,NetApp/cinder,Accelerite/cinder,openstack/cinder,eharney/cinder,manojhirway/ExistingImagesOnNFS,JioCloud/cinder,manojhirway/ExistingImagesOnNFS,sasukeh/cinder,saeki-masaki/cinder,NeCTAR-RC/cinder,cloudbase/cinder,Accelerite/cinder,sasukeh/cinder,takeshineshiro/cinder,Hybrid-Cloud/cinder,CloudServer/cinder,tlakshman26/cinder-https-changes,inkerra/cinder,scality/cinder,tlakshman26/cinder-bug-fix-volume-conversion-full,cloudbau/cinder,apporc/cinder
python
## Code Before: from sqlalchemy import Column from sqlalchemy import MetaData, String, Table def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = Column('provider_location', String(255)) snapshots.create_column(provider_location) def downgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = snapshots.columns.provider_location provider_location.drop() ## Instruction: Fix provider_location column add for PSQL Migration 006 (commit 690cae58e6bbac5758ea2f7b60774c797d28fba5) didn't work properly for postgres, this patch corrects the upgrade by ensuring the execute is performed and the value is initialized to None. Since we haven't released a milestone etc with this migration in the code it should be safe to just fix it here and submit. Change-Id: I10a09aed3470c35c8ebbe22f29aa511592167c35 ## Code After: from sqlalchemy import Column from sqlalchemy import MetaData, String, Table def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = Column('provider_location', String(255)) snapshots.create_column(provider_location) snapshots.update().values(provider_location=None).execute() def downgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine snapshots = Table('snapshots', meta, autoload=True) provider_location = snapshots.columns.provider_location snapshots.drop_column(provider_location)
// ... existing code ... snapshots = Table('snapshots', meta, autoload=True) provider_location = Column('provider_location', String(255)) snapshots.create_column(provider_location) snapshots.update().values(provider_location=None).execute() def downgrade(migrate_engine): // ... modified code ... snapshots = Table('snapshots', meta, autoload=True) provider_location = snapshots.columns.provider_location snapshots.drop_column(provider_location) // ... rest of the code ...
20115684ea5ab52e0c51f43fd85aa9945560d103
interleave-pdf.py
interleave-pdf.py
import PyPDF2 from formlayout import fedit def main(): paths = [('Input', ''), ('Output', '')] pathsRead = fedit(paths, title="Interleave pdf", comment="Enter the full path to the source pdf and a path to output the result." ) # Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf document = PyPDF2.PdfFileReader(pathsRead[0]) writer = PyPDF2.PdfFileWriter() for page in document.pages: writer.addPage(page) writer.addBlankPage() outputStream = open(pathsRead[1], 'wb') writer.write(outputStream) outputStream.close() if __name__ == "__main__": main()
import PyPDF2 from tkinter import * from tkinter.filedialog import askopenfilename from tkinter.filedialog import asksaveasfilename class Application(Frame): def __init__(self): self.input_path = None; self.output_path = None; Frame.__init__(self) self.master.resizable(False, False) self.master.title('Interleave PDF') self.grid() self.button = Button(self, text="Select input", command=self.load_file, width=12) self.button.grid(row=1, column=0, sticky=W) self.button = Button(self, text="Select output", command=self.save_file, width=12) self.button.grid(row=1, column=2, sticky=W) self.button = Button(self, text="Interleave", command=self.interleave, width=12) self.button.grid(row=1, column=3, sticky=W) def load_file(self): self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*"))) def save_file(self): self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*"))) def interleave(self): if self.input_path and self.output_path: document = PyPDF2.PdfFileReader(self.input_path) writer = PyPDF2.PdfFileWriter() for page in document.pages: writer.addPage(page) writer.addBlankPage() outputStream = open(self.output_path, 'wb') writer.write(outputStream) outputStream.close() if __name__ == "__main__": Application().mainloop()
Replace formlayout GUI with tkinter
Replace formlayout GUI with tkinter. Separate buttons for selecting input and output, and for running the interleave procedure.
Python
mit
sproberts92/interleave-pdf
python
## Code Before: import PyPDF2 from formlayout import fedit def main(): paths = [('Input', ''), ('Output', '')] pathsRead = fedit(paths, title="Interleave pdf", comment="Enter the full path to the source pdf and a path to output the result." ) # Full path to files should be specified eg C:\Users\Sam\Documents\Input.pdf and C:\Users\Sam\Documents\Input.pdf document = PyPDF2.PdfFileReader(pathsRead[0]) writer = PyPDF2.PdfFileWriter() for page in document.pages: writer.addPage(page) writer.addBlankPage() outputStream = open(pathsRead[1], 'wb') writer.write(outputStream) outputStream.close() if __name__ == "__main__": main() ## Instruction: Replace formlayout GUI with tkinter Separate buttons for selecting input and output, and for running the interleave procedure. ## Code After: import PyPDF2 from tkinter import * from tkinter.filedialog import askopenfilename from tkinter.filedialog import asksaveasfilename class Application(Frame): def __init__(self): self.input_path = None; self.output_path = None; Frame.__init__(self) self.master.resizable(False, False) self.master.title('Interleave PDF') self.grid() self.button = Button(self, text="Select input", command=self.load_file, width=12) self.button.grid(row=1, column=0, sticky=W) self.button = Button(self, text="Select output", command=self.save_file, width=12) self.button.grid(row=1, column=2, sticky=W) self.button = Button(self, text="Interleave", command=self.interleave, width=12) self.button.grid(row=1, column=3, sticky=W) def load_file(self): self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*"))) def save_file(self): self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*"))) def interleave(self): if self.input_path and self.output_path: document = PyPDF2.PdfFileReader(self.input_path) writer = PyPDF2.PdfFileWriter() for page in document.pages: writer.addPage(page) writer.addBlankPage() outputStream = open(self.output_path, 'wb') writer.write(outputStream) outputStream.close() if __name__ == "__main__": Application().mainloop()
// ... existing code ... import PyPDF2 from tkinter import * from tkinter.filedialog import askopenfilename from tkinter.filedialog import asksaveasfilename class Application(Frame): def __init__(self): self.input_path = None; self.output_path = None; Frame.__init__(self) self.master.resizable(False, False) self.master.title('Interleave PDF') self.grid() self.button = Button(self, text="Select input", command=self.load_file, width=12) self.button.grid(row=1, column=0, sticky=W) self.button = Button(self, text="Select output", command=self.save_file, width=12) self.button.grid(row=1, column=2, sticky=W) self.button = Button(self, text="Interleave", command=self.interleave, width=12) self.button.grid(row=1, column=3, sticky=W) def load_file(self): self.input_path = askopenfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*"))) def save_file(self): self.output_path = asksaveasfilename(filetypes=(("Adobe PDF Files", "*.pdf"), ("All files", "*.*"))) def interleave(self): if self.input_path and self.output_path: document = PyPDF2.PdfFileReader(self.input_path) writer = PyPDF2.PdfFileWriter() for page in document.pages: writer.addPage(page) writer.addBlankPage() outputStream = open(self.output_path, 'wb') writer.write(outputStream) outputStream.close() if __name__ == "__main__": Application().mainloop() // ... rest of the code ...
cb08d25f49b8b4c5177c8afdd9a69330992ee854
tests/replay/test_replay.py
tests/replay/test_replay.py
import pytest from cookiecutter import replay, main, exceptions def test_get_replay_file_name(): """Make sure that replay.get_file_name generates a valid json file path.""" assert replay.get_file_name('foo', 'bar') == 'foo/bar.json' @pytest.fixture(params=[ {'no_input': True}, {'extra_context': {}}, {'no_input': True, 'extra_context': {}}, ]) def invalid_kwargs(request): return request.param def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs)
import pytest from cookiecutter import replay, main, exceptions def test_get_replay_file_name(): """Make sure that replay.get_file_name generates a valid json file path.""" assert replay.get_file_name('foo', 'bar') == 'foo/bar.json' @pytest.fixture(params=[ {'no_input': True}, {'extra_context': {}}, {'no_input': True, 'extra_context': {}}, ]) def invalid_kwargs(request): return request.param def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs) def test_main_does_not_invoke_dump_but_load(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=True) assert not mock_prompt.called assert not mock_gen_context.called assert not mock_replay_dump.called assert mock_replay_load.called assert mock_gen_files.called def test_main_does_not_invoke_load_but_dump(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=False) assert mock_prompt.called assert mock_gen_context.called assert mock_replay_dump.called assert not mock_replay_load.called assert mock_gen_files.called
Add tests for correct behaviour in cookiecutter.main for replay
Add tests for correct behaviour in cookiecutter.main for replay
Python
bsd-3-clause
christabor/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,cguardia/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,michaeljoseph/cookiecutter,moi65/cookiecutter,terryjbates/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,agconti/cookiecutter,cguardia/cookiecutter,christabor/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,stevepiercy/cookiecutter,takeflight/cookiecutter,pjbull/cookiecutter,benthomasson/cookiecutter,agconti/cookiecutter,benthomasson/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,audreyr/cookiecutter,moi65/cookiecutter,dajose/cookiecutter,hackebrot/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,willingc/cookiecutter
python
## Code Before: import pytest from cookiecutter import replay, main, exceptions def test_get_replay_file_name(): """Make sure that replay.get_file_name generates a valid json file path.""" assert replay.get_file_name('foo', 'bar') == 'foo/bar.json' @pytest.fixture(params=[ {'no_input': True}, {'extra_context': {}}, {'no_input': True, 'extra_context': {}}, ]) def invalid_kwargs(request): return request.param def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs) ## Instruction: Add tests for a correct behaviour in cookiecutter.main for replay ## Code After: import pytest from cookiecutter import replay, main, exceptions def test_get_replay_file_name(): """Make sure that replay.get_file_name generates a valid json file path.""" assert replay.get_file_name('foo', 'bar') == 'foo/bar.json' @pytest.fixture(params=[ {'no_input': True}, {'extra_context': {}}, {'no_input': True, 'extra_context': {}}, ]) def invalid_kwargs(request): return request.param def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs) def test_main_does_not_invoke_dump_but_load(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=True) assert not mock_prompt.called assert not mock_gen_context.called assert not mock_replay_dump.called assert mock_replay_load.called assert mock_gen_files.called def test_main_does_not_invoke_load_but_dump(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=False) assert mock_prompt.called assert mock_gen_context.called assert mock_replay_dump.called assert not mock_replay_load.called assert mock_gen_files.called
... def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs) def test_main_does_not_invoke_dump_but_load(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=True) assert not mock_prompt.called assert not mock_gen_context.called assert not mock_replay_dump.called assert mock_replay_load.called assert mock_gen_files.called def test_main_does_not_invoke_load_but_dump(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=False) assert mock_prompt.called assert mock_gen_context.called assert mock_replay_dump.called assert not mock_replay_load.called assert mock_gen_files.called ...
c72d9060142fe1de1e2201fc355f2ee95f5354c7
src/waldur_mastermind/invoices/migrations/0023_invoice_current_cost.py
src/waldur_mastermind/invoices/migrations/0023_invoice_current_cost.py
from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
Fix database migration for invoices application.
Fix database migration for invoices application.
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
python
## Code Before: from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ] ## Instruction: Fix database migration for invoices application. ## Code After: from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
// ... existing code ... def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() // ... rest of the code ...
188aec84d6ea4f545509a1dd7d9c607e5828a5a2
src/se/vidstige/jadb/RemoteFile.java
src/se/vidstige/jadb/RemoteFile.java
package se.vidstige.jadb; /** * Created by vidstige on 2014-03-20 */ public class RemoteFile { private final String path; public RemoteFile(String path) { this.path = path; } public String getName() { throw new UnsupportedOperationException(); } public int getSize() { throw new UnsupportedOperationException(); } public long getLastModified() { throw new UnsupportedOperationException(); } public boolean isDirectory() { throw new UnsupportedOperationException(); } public String getPath() { return path;} @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RemoteFile that = (RemoteFile) o; if (!path.equals(that.path)) return false; return true; } @Override public int hashCode() { return path.hashCode(); } }
package se.vidstige.jadb; /** * Created by vidstige on 2014-03-20 */ public class RemoteFile { private final String path; public RemoteFile(String path) { this.path = path; } public String getName() { throw new UnsupportedOperationException(); } public int getSize() { throw new UnsupportedOperationException(); } public long getLastModified() { throw new UnsupportedOperationException(); } public boolean isDirectory() { throw new UnsupportedOperationException(); } public String getPath() { return path;} @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RemoteFile that = (RemoteFile) o; return path.equals(that.path); } @Override public int hashCode() { return path.hashCode(); } }
Return of boolean expressions should not be wrapped into an "if-then-else" statement (squid:S1126)
Fix: Return of boolean expressions should not be wrapped into an "if-then-else" statement (squid:S1126)
Java
apache-2.0
vidstige/jadb,vidstige/jadb
java
## Code Before: package se.vidstige.jadb; /** * Created by vidstige on 2014-03-20 */ public class RemoteFile { private final String path; public RemoteFile(String path) { this.path = path; } public String getName() { throw new UnsupportedOperationException(); } public int getSize() { throw new UnsupportedOperationException(); } public long getLastModified() { throw new UnsupportedOperationException(); } public boolean isDirectory() { throw new UnsupportedOperationException(); } public String getPath() { return path;} @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RemoteFile that = (RemoteFile) o; if (!path.equals(that.path)) return false; return true; } @Override public int hashCode() { return path.hashCode(); } } ## Instruction: Fix: Return of boolean expressions should not be wrapped into an "if-then-else" statement (squid:S1126) ## Code After: package se.vidstige.jadb; /** * Created by vidstige on 2014-03-20 */ public class RemoteFile { private final String path; public RemoteFile(String path) { this.path = path; } public String getName() { throw new UnsupportedOperationException(); } public int getSize() { throw new UnsupportedOperationException(); } public long getLastModified() { throw new UnsupportedOperationException(); } public boolean isDirectory() { throw new UnsupportedOperationException(); } public String getPath() { return path;} @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RemoteFile that = (RemoteFile) o; return path.equals(that.path); } @Override public int hashCode() { return path.hashCode(); } }
// ... existing code ... if (o == null || getClass() != o.getClass()) return false; RemoteFile that = (RemoteFile) o; return path.equals(that.path); } @Override // ... rest of the code ...
2073942c49cb85664c068412951f2c1f7351679f
add_random_answers.py
add_random_answers.py
import pandas as pd import time from datetime import datetime, date start_date = date(2014, 1, 1) end_date = datetime.now() date_range = pd.date_range(start_date, end_date) for date in date_range: print(date)
import pandas as pd import time from datetime import datetime, date from random import randint start_date = date(2014, 1, 1) end_date = datetime.now() date_range = pd.date_range(start_date, end_date) for date in date_range: random_hour = randint(10, 17) random_minute = randint(0, 59) random_second = randint(0, 59) new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second) print(new_date)
Print random time based on date range
Print random time based on date range
Python
mit
andrewlrogers/srvy
python
## Code Before: import pandas as pd import time from datetime import datetime, date start_date = date(2014, 1, 1) end_date = datetime.now() date_range = pd.date_range(start_date, end_date) for date in date_range: print(date) ## Instruction: Print random time based on date range ## Code After: import pandas as pd import time from datetime import datetime, date from random import randint start_date = date(2014, 1, 1) end_date = datetime.now() date_range = pd.date_range(start_date, end_date) for date in date_range: random_hour = randint(10, 17) random_minute = randint(0, 59) random_second = randint(0, 59) new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second) print(new_date)
// ... existing code ... import pandas as pd import time from datetime import datetime, date from random import randint start_date = date(2014, 1, 1) end_date = datetime.now() // ... modified code ... date_range = pd.date_range(start_date, end_date) for date in date_range: random_hour = randint(10, 17) random_minute = randint(0, 59) random_second = randint(0, 59) new_date = (date.year, date.month, date.day, random_hour, random_minute, random_second) print(new_date) // ... rest of the code ...
32789be8f1f98f7538f4452a8118c261037f2d75
tempwatcher/watch.py
tempwatcher/watch.py
import json import requests class TemperatureWatch(object): thermostat_url = None alert_high = 80 alert_low = 60 _last_response = None def get_info(self): r = requests.get(self.thermostat_url + '/tstat') self._last_response = json.loads(r.text) return r.text def check_temp(self): if not self._last_response: self.get_info() if self._last_response['temp'] > self.alert_high: self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp'])) if self._last_response['temp'] < self.alert_low: self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp'])) def alert(self, message): print(message) if __name__ == '__main__': tw = TemperatureWatch() tw.thermostat_url = 'http://10.0.1.52' tw.check_temp()
import json import requests class TemperatureWatch(object): thermostat_url = None alert_high = 80 alert_low = 60 _last_response = None def get_info(self): r = requests.get(self.thermostat_url + '/tstat') self._last_response = json.loads(r.text) return r.text def check_temp(self): if not self._last_response: self.get_info() if self._last_response['temp'] > self.alert_high: self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp'])) if self._last_response['temp'] < self.alert_low: self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp'])) def alert(self, message): print(message) if __name__ == '__main__': thermostat_ip = '10.0.1.53' # simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though... tw = TemperatureWatch() tw.thermostat_url = 'http://%s' % thermostat_ip tw.check_temp()
Refactor the initialization a bit to make configuration easier.
Refactor the initialization a bit to make configuration easier.
Python
bsd-3-clause
adamfast/tempwatcher
python
## Code Before: import json import requests class TemperatureWatch(object): thermostat_url = None alert_high = 80 alert_low = 60 _last_response = None def get_info(self): r = requests.get(self.thermostat_url + '/tstat') self._last_response = json.loads(r.text) return r.text def check_temp(self): if not self._last_response: self.get_info() if self._last_response['temp'] > self.alert_high: self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp'])) if self._last_response['temp'] < self.alert_low: self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp'])) def alert(self, message): print(message) if __name__ == '__main__': tw = TemperatureWatch() tw.thermostat_url = 'http://10.0.1.52' tw.check_temp() ## Instruction: Refactor the initialization a bit to make configuration easier. ## Code After: import json import requests class TemperatureWatch(object): thermostat_url = None alert_high = 80 alert_low = 60 _last_response = None def get_info(self): r = requests.get(self.thermostat_url + '/tstat') self._last_response = json.loads(r.text) return r.text def check_temp(self): if not self._last_response: self.get_info() if self._last_response['temp'] > self.alert_high: self.alert('Temperature max of %s exceeded. Currently %s' % (self.alert_high, self._last_response['temp'])) if self._last_response['temp'] < self.alert_low: self.alert('Temperature min of %s exceeded. Currently %s' % (self.alert_low, self._last_response['temp'])) def alert(self, message): print(message) if __name__ == '__main__': thermostat_ip = '10.0.1.53' # simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though... tw = TemperatureWatch() tw.thermostat_url = 'http://%s' % thermostat_ip tw.check_temp()
// ... existing code ... if __name__ == '__main__': thermostat_ip = '10.0.1.53' # simple configuration - set the IP, nothing else. Print the alerts when they occur to stdout. Not very useful though... tw = TemperatureWatch() tw.thermostat_url = 'http://%s' % thermostat_ip tw.check_temp() // ... rest of the code ...
fdd57913aa11c29ecf160f32a9091e59de598899
plugins/YTranslate.py
plugins/YTranslate.py
import logging from urllib.parse import quote from telegram import Bot, Update from telegram.ext import Updater from requests import post import constants # pylint: disable=E0401 import settings LOGGER = logging.getLogger("YTranslate") YAURL = "https://translate.yandex.net/api/v1.5/tr.json/translate?" YAURL += "key=%s" % settings.YANDEX_TRANSLATION_TOKEN def preload(updater: Updater, level): """ This loads whenever plugin starts Even if you dont need it, you SHOULD put at least return None, otherwise your plugin wont load """ return def translate(bot: Bot, update: Update, user, args): # pylint: disable=W0613 """/tl""" if update.message.reply_to_message: if len(args) > 0: lang = args[0].lower() else: lang = "en" yandex = post(YAURL, params={"text":update.message.reply_to_message.text, "lang":lang}).json() try: return yandex["lang"].upper() + "\n" + yandex["text"][0], constants.TEXT except KeyError: return "Unknown language:%s" % args[0].upper(), constants.TEXT COMMANDS = [ { "command":"/tl", "function":translate, "description":"Translates message to english. Example: [In Reply To Message] /tl", "inline_support":True } ]
import logging from urllib.parse import quote from telegram import Bot, Update from telegram.ext import Updater from requests import post import constants # pylint: disable=E0401 import settings import octeon LOGGER = logging.getLogger("YTranslate") YAURL = "https://translate.yandex.net/api/v1.5/tr.json/translate?" YAURL += "key=%s" % settings.YANDEX_TRANSLATION_TOKEN def preload(updater: Updater, level): """ This loads whenever plugin starts Even if you dont need it, you SHOULD put at least return None, otherwise your plugin wont load """ return def translate(bot: Bot, update: Update, user, args): # pylint: disable=W0613 """/tl""" if update.message.reply_to_message: if len(args) > 0: lang = args[0].lower() else: lang = "en" yandex = post(YAURL, params={"text":update.message.reply_to_message.text, "lang":lang}).json() try: return octeon.message(yandex["lang"].upper() + "\n" + yandex["text"][0]) except KeyError: return octeon.message(yandex["error"], failed=True) COMMANDS = [ { "command":"/tl", "function":translate, "description":"Translates message to english. Example: [In Reply To Message] /tl", "inline_support":True } ]
Update translate plugin to new message format
Update translate plugin to new message format
Python
mit
ProtoxiDe22/Octeon
python
## Code Before: import logging from urllib.parse import quote from telegram import Bot, Update from telegram.ext import Updater from requests import post import constants # pylint: disable=E0401 import settings LOGGER = logging.getLogger("YTranslate") YAURL = "https://translate.yandex.net/api/v1.5/tr.json/translate?" YAURL += "key=%s" % settings.YANDEX_TRANSLATION_TOKEN def preload(updater: Updater, level): """ This loads whenever plugin starts Even if you dont need it, you SHOULD put at least return None, otherwise your plugin wont load """ return def translate(bot: Bot, update: Update, user, args): # pylint: disable=W0613 """/tl""" if update.message.reply_to_message: if len(args) > 0: lang = args[0].lower() else: lang = "en" yandex = post(YAURL, params={"text":update.message.reply_to_message.text, "lang":lang}).json() try: return yandex["lang"].upper() + "\n" + yandex["text"][0], constants.TEXT except KeyError: return "Unknown language:%s" % args[0].upper(), constants.TEXT COMMANDS = [ { "command":"/tl", "function":translate, "description":"Translates message to english. Example: [In Reply To Message] /tl", "inline_support":True } ] ## Instruction: Update translate plugin to new message format ## Code After: import logging from urllib.parse import quote from telegram import Bot, Update from telegram.ext import Updater from requests import post import constants # pylint: disable=E0401 import settings import octeon LOGGER = logging.getLogger("YTranslate") YAURL = "https://translate.yandex.net/api/v1.5/tr.json/translate?" YAURL += "key=%s" % settings.YANDEX_TRANSLATION_TOKEN def preload(updater: Updater, level): """ This loads whenever plugin starts Even if you dont need it, you SHOULD put at least return None, otherwise your plugin wont load """ return def translate(bot: Bot, update: Update, user, args): # pylint: disable=W0613 """/tl""" if update.message.reply_to_message: if len(args) > 0: lang = args[0].lower() else: lang = "en" yandex = post(YAURL, params={"text":update.message.reply_to_message.text, "lang":lang}).json() try: return octeon.message(yandex["lang"].upper() + "\n" + yandex["text"][0]) except KeyError: return octeon.message(yandex["error"], failed=True) COMMANDS = [ { "command":"/tl", "function":translate, "description":"Translates message to english. Example: [In Reply To Message] /tl", "inline_support":True } ]
# ... existing code ... import constants # pylint: disable=E0401 import settings import octeon LOGGER = logging.getLogger("YTranslate") YAURL = "https://translate.yandex.net/api/v1.5/tr.json/translate?" # ... modified code ... lang = "en" yandex = post(YAURL, params={"text":update.message.reply_to_message.text, "lang":lang}).json() try: return octeon.message(yandex["lang"].upper() + "\n" + yandex["text"][0]) except KeyError: return octeon.message(yandex["error"], failed=True) COMMANDS = [ { # ... rest of the code ...
0fb7e8d901addc801fb9b99d744666f573f672d3
billjobs/migrations/0003_auto_20160822_2341.py
billjobs/migrations/0003_auto_20160822_2341.py
from __future__ import unicode_literals from django.db import migrations def add_billing_address(apps, schema_editor): ''' Data migration add billing_address in Bill from user billing_address field ''' Bill = apps.get_model('billjobs', 'Bill') for bill in Bill.objects.all(): bill.billing_address = bill.user.billing_address bill.save() class Migration(migrations.Migration): dependencies = [ ('billjobs', '0002_service_is_available_squashed_0005_bill_issuer_address_default'), ] operations = [ migrations.RunPython(add_billing_address), ]
from __future__ import unicode_literals from django.db import migrations, models def add_billing_address(apps, schema_editor): ''' Data migration add billing_address in Bill from user billing_address field ''' Bill = apps.get_model('billjobs', 'Bill') for bill in Bill.objects.all(): bill.billing_address = bill.user.userprofile.billing_address bill.save() class Migration(migrations.Migration): dependencies = [ ('billjobs', '0002_service_is_available_squashed_0005_bill_issuer_address_default'), ] operations = [ migrations.AddField( model_name='bill', name='billing_address', field=models.CharField(max_length=1024), ), migrations.RunPython(add_billing_address), ]
Add billing_address and migrate data
Add billing_address and migrate data
Python
mit
ioO/billjobs
python
## Code Before: from __future__ import unicode_literals from django.db import migrations def add_billing_address(apps, schema_editor): ''' Data migration add billing_address in Bill from user billing_address field ''' Bill = apps.get_model('billjobs', 'Bill') for bill in Bill.objects.all(): bill.billing_address = bill.user.billing_address bill.save() class Migration(migrations.Migration): dependencies = [ ('billjobs', '0002_service_is_available_squashed_0005_bill_issuer_address_default'), ] operations = [ migrations.RunPython(add_billing_address), ] ## Instruction: Add billing_address and migrate data ## Code After: from __future__ import unicode_literals from django.db import migrations, models def add_billing_address(apps, schema_editor): ''' Data migration add billing_address in Bill from user billing_address field ''' Bill = apps.get_model('billjobs', 'Bill') for bill in Bill.objects.all(): bill.billing_address = bill.user.userprofile.billing_address bill.save() class Migration(migrations.Migration): dependencies = [ ('billjobs', '0002_service_is_available_squashed_0005_bill_issuer_address_default'), ] operations = [ migrations.AddField( model_name='bill', name='billing_address', field=models.CharField(max_length=1024), ), migrations.RunPython(add_billing_address), ]
... from __future__ import unicode_literals from django.db import migrations, models def add_billing_address(apps, schema_editor): ''' Data migration add billing_address in Bill from user billing_address ... ''' Bill = apps.get_model('billjobs', 'Bill') for bill in Bill.objects.all(): bill.billing_address = bill.user.userprofile.billing_address bill.save() class Migration(migrations.Migration): ... ] operations = [ migrations.AddField( model_name='bill', name='billing_address', field=models.CharField(max_length=1024), ), migrations.RunPython(add_billing_address), ] ...
35fe7bb6411c8009253bf66fb7739a5d49a7028d
scuole/counties/management/commands/bootstrapcounties.py
scuole/counties/management/commands/bootstrapcounties.py
from __future__ import absolute_import, unicode_literals import csv import os from django.conf import settings from django.core.management.base import BaseCommand from django.utils.text import slugify from ...models import County from scuole.states.models import State class Command(BaseCommand): help = 'Bootstraps County models using DSHS county list.' def handle(self, *args, **options): self.texas = State.objects.get(name='Texas') counties_file = os.path.join( settings.DATA_FOLDER, 'counties', 'counties.csv') with open(counties_file, 'rU') as f: reader = csv.DictReader(f) counties = [] for row in reader: counties.append(self.create_county(row)) County.objects.bulk_create(counties) def create_county(self, county): return County( name=county['County Name'], slug=slugify(county['County Name']), fips=county['FIPS #'], state=self.texas, )
from __future__ import absolute_import, unicode_literals import csv import os from django.conf import settings from django.core.management.base import BaseCommand from django.utils.text import slugify from ...models import County from scuole.states.models import State class Command(BaseCommand): help = 'Bootstraps County models using DSHS county list.' def handle(self, *args, **options): self.texas = State.objects.get(name='Texas') counties_file = os.path.join( settings.DATA_FOLDER, 'counties', 'counties.csv') with open(counties_file, 'rU') as f: reader = csv.DictReader(f) counties = [] for row in reader: counties.append(self.create_county(row)) County.objects.bulk_create(counties) def create_county(self, county): self.stdout.write( 'Creating {} County...'.format(county['County Name'])) return County( name=county['County Name'], slug=slugify(county['County Name']), fips=county['FIPS #'], state=self.texas, )
Add feedback during county loading
Add feedback during county loading
Python
mit
texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole
python
## Code Before: from __future__ import absolute_import, unicode_literals import csv import os from django.conf import settings from django.core.management.base import BaseCommand from django.utils.text import slugify from ...models import County from scuole.states.models import State class Command(BaseCommand): help = 'Bootstraps County models using DSHS county list.' def handle(self, *args, **options): self.texas = State.objects.get(name='Texas') counties_file = os.path.join( settings.DATA_FOLDER, 'counties', 'counties.csv') with open(counties_file, 'rU') as f: reader = csv.DictReader(f) counties = [] for row in reader: counties.append(self.create_county(row)) County.objects.bulk_create(counties) def create_county(self, county): return County( name=county['County Name'], slug=slugify(county['County Name']), fips=county['FIPS #'], state=self.texas, ) ## Instruction: Add feedback during county loader ## Code After: from __future__ import absolute_import, unicode_literals import csv import os from django.conf import settings from django.core.management.base import BaseCommand from django.utils.text import slugify from ...models import County from scuole.states.models import State class Command(BaseCommand): help = 'Bootstraps County models using DSHS county list.' def handle(self, *args, **options): self.texas = State.objects.get(name='Texas') counties_file = os.path.join( settings.DATA_FOLDER, 'counties', 'counties.csv') with open(counties_file, 'rU') as f: reader = csv.DictReader(f) counties = [] for row in reader: counties.append(self.create_county(row)) County.objects.bulk_create(counties) def create_county(self, county): self.stdout.write( 'Creating {} County...'.format(county['County Name'])) return County( name=county['County Name'], slug=slugify(county['County Name']), fips=county['FIPS #'], state=self.texas, )
// ... existing code ... County.objects.bulk_create(counties) def create_county(self, county): self.stdout.write( 'Creating {} County...'.format(county['County Name'])) return County( name=county['County Name'], slug=slugify(county['County Name']), // ... rest of the code ...
f4505b225cf0d0d171820eeb633aa506d440090d
album-service-api/src/main/java/com/asteroid/duck/osgi/example/album/AlbumInfoService.java
album-service-api/src/main/java/com/asteroid/duck/osgi/example/album/AlbumInfoService.java
/** * Copyright (c) 2016 Dr. Chris Senior. * See LICENSE.txt for licensing terms */ package com.asteroid.duck.osgi.example.album; /** * Interface to a service that provides information about Underworld albums */ public interface AlbumInfoService { }
/** * Copyright (c) 2016 Dr. Chris Senior. * See LICENSE.txt for licensing terms */ package com.asteroid.duck.osgi.example.album; import java.util.List; /** * Interface to a service that provides information about Underworld albums */ public interface AlbumInfoService { List<Album> getAlbums(); List<Track> getTracks(); }
Add API for album info service
Add API for album info service
Java
mit
duckAsteroid/osgi-test
java
## Code Before: /** * Copyright (c) 2016 Dr. Chris Senior. * See LICENSE.txt for licensing terms */ package com.asteroid.duck.osgi.example.album; /** * Interface to a service that provides information about Underworld albums */ public interface AlbumInfoService { } ## Instruction: Add API for album info service ## Code After: /** * Copyright (c) 2016 Dr. Chris Senior. * See LICENSE.txt for licensing terms */ package com.asteroid.duck.osgi.example.album; import java.util.List; /** * Interface to a service that provides information about Underworld albums */ public interface AlbumInfoService { List<Album> getAlbums(); List<Track> getTracks(); }
# ... existing code ... */ package com.asteroid.duck.osgi.example.album; import java.util.List; /** * Interface to a service that provides information about Underworld albums */ public interface AlbumInfoService { List<Album> getAlbums(); List<Track> getTracks(); } # ... rest of the code ...
272ae9518abe3815f1651a730f14b8c1316f96f3
C/header.h
C/header.h
/*! * @brief Template C-header file * * This is a template C-header file * @author <+AUTHOR+> * @date <+DATE+> * @file <+FILE+> * @version 0.1 */ #ifndef <+FILE_CAPITAL+>_H #define <+FILE_CAPITAL+>_H #if defined(_MSC_VER) # define <+FILE_CAPITAL+>_DLLEXPORT __declspec(dllexport) #elif defined(__GNUC__) # define <+FILE_CAPITAL+>_DLLEXPORT __attribute__((dllexport)) #else # define DLLEXPORT #endif #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ <+CURSOR+> #ifdef __cplusplus } /* extern "C" */ #endif /* __cplusplus */ #endif /* <+FILE_CAPITAL+>_H */
/*! * @brief Template C-header file * * This is a template C-header file * @author <+AUTHOR+> * @date <+DATE+> * @file <+FILE+> * @version 0.1 */ #ifndef <+FILE_CAPITAL+>_H #define <+FILE_CAPITAL+>_H #if defined(_MSC_VER) # define <+FILE_CAPITAL+>_DLLEXPORT __declspec(dllexport) #elif defined(__GNUC__) # define <+FILE_CAPITAL+>_DLLEXPORT __attribute__((dllexport)) #else # define <+FILE_CAPITAL+>_DLLEXPORT #endif #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ <+CURSOR+> #ifdef __cplusplus } /* extern "C" */ #endif /* __cplusplus */ #undef <+FILE_CAPITAL+>_DLLEXPORT #endif /* <+FILE_CAPITAL+>_H */
Add macro for DLL exportation
Add macro for DLL exportation
C
mit
koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate,koturn/kotemplate
c
## Code Before: /*! * @brief Template C-header file * * This is a template C-header file * @author <+AUTHOR+> * @date <+DATE+> * @file <+FILE+> * @version 0.1 */ #ifndef <+FILE_CAPITAL+>_H #define <+FILE_CAPITAL+>_H #if defined(_MSC_VER) # define <+FILE_CAPITAL+>_DLLEXPORT __declspec(dllexport) #elif defined(__GNUC__) # define <+FILE_CAPITAL+>_DLLEXPORT __attribute__((dllexport)) #else # define DLLEXPORT #endif #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ <+CURSOR+> #ifdef __cplusplus } /* extern "C" */ #endif /* __cplusplus */ #endif /* <+FILE_CAPITAL+>_H */ ## Instruction: Add macro for DLL exportation ## Code After: /*! * @brief Template C-header file * * This is a template C-header file * @author <+AUTHOR+> * @date <+DATE+> * @file <+FILE+> * @version 0.1 */ #ifndef <+FILE_CAPITAL+>_H #define <+FILE_CAPITAL+>_H #if defined(_MSC_VER) # define <+FILE_CAPITAL+>_DLLEXPORT __declspec(dllexport) #elif defined(__GNUC__) # define <+FILE_CAPITAL+>_DLLEXPORT __attribute__((dllexport)) #else # define <+FILE_CAPITAL+>_DLLEXPORT #endif #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ <+CURSOR+> #ifdef __cplusplus } /* extern "C" */ #endif /* __cplusplus */ #undef <+FILE_CAPITAL+>_DLLEXPORT #endif /* <+FILE_CAPITAL+>_H */
// ... existing code ... #elif defined(__GNUC__) # define <+FILE_CAPITAL+>_DLLEXPORT __attribute__((dllexport)) #else # define <+FILE_CAPITAL+>_DLLEXPORT #endif #ifdef __cplusplus // ... modified code ... #ifdef __cplusplus } /* extern "C" */ #endif /* __cplusplus */ #undef <+FILE_CAPITAL+>_DLLEXPORT #endif /* <+FILE_CAPITAL+>_H */ // ... rest of the code ...
53f7acf5fc04ca6f86456fda95504ba41046d860
openedx/features/specializations/templatetags/sso_meta_tag.py
openedx/features/specializations/templatetags/sso_meta_tag.py
from django import template from django.template import Template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return Template('<meta name="title" content="${ title }">' + ' ' + '<meta name="description" content="${ subtitle }">' + ' ' + ## OG (Open Graph) title and description added below to give social media info to display ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags) '<meta property="og:title" content="${ title }">' + ' ' + '<meta property="og:description" content="${ subtitle }">' + ' ' + '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta property="og:image:width" content="512">' + ' ' + '<meta property="og:image:height" content="512">' + ' ' + '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' + '<meta name="twitter:title" content="${ title }">' + ' ' + '<meta name="twitter:description" content="${ subtitle }">').render(context);
from django import template from django.template.loader import get_template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
Add Django custom tag for SSO
Add Django custom tag for SSO
Python
agpl-3.0
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
python
## Code Before: from django import template from django.template import Template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return Template('<meta name="title" content="${ title }">' + ' ' + '<meta name="description" content="${ subtitle }">' + ' ' + ## OG (Open Graph) title and description added below to give social media info to display ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags) '<meta property="og:title" content="${ title }">' + ' ' + '<meta property="og:description" content="${ subtitle }">' + ' ' + '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta property="og:image:width" content="512">' + ' ' + '<meta property="og:image:height" content="512">' + ' ' + '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' + '<meta name="twitter:title" content="${ title }">' + ' ' + '<meta name="twitter:description" content="${ subtitle }">').render(context); ## Instruction: Add Django Custom Tag SSO ## Code After: from django import template from django.template.loader import get_template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
... from django import template from django.template.loader import get_template register = template.Library() ... @register.simple_tag(takes_context=True) def sso_meta(context): return get_template('features/specializations/sso_meta_template.html').render(context.flatten()) ...
efb91a838ffbe78d12302672f4dafbe46b72c2b1
test/src/test/java/hudson/node_monitors/ClockMonitorDescriptorTest.java
test/src/test/java/hudson/node_monitors/ClockMonitorDescriptorTest.java
package hudson.node_monitors; import org.jvnet.hudson.test.HudsonTestCase; import hudson.slaves.DumbSlave; import hudson.slaves.SlaveComputer; import hudson.util.ClockDifference; import hudson.util.TimeUnit2; /** * @author Richard Mortimer */ public class ClockMonitorDescriptorTest extends HudsonTestCase { /** * Makes sure that it returns sensible values. */ public void testClockMonitor() throws Exception { DumbSlave s = createSlave(); SlaveComputer c = s.getComputer(); c.connect(false).get(); // wait until it's connected if(c.isOffline()) fail("Slave failed to go online: "+c.getLog()); ClockDifference cd = ClockMonitor.DESCRIPTOR.monitor(c); long diff = cd.diff; assertTrue(diff < TimeUnit2.SECONDS.toMillis(5)); assertTrue(diff > TimeUnit2.SECONDS.toMillis(-5)); assertTrue(cd.abs() >= 0); assertTrue(cd.abs() < TimeUnit2.SECONDS.toMillis(5)); assertFalse(cd.isDangerous()); assertTrue("html output too short", cd.toHtml().length() > 0); } }
package hudson.node_monitors; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import hudson.slaves.DumbSlave; import hudson.slaves.SlaveComputer; import hudson.util.ClockDifference; import hudson.util.TimeUnit2; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; /** * @author Richard Mortimer */ public class ClockMonitorDescriptorTest { @Rule public JenkinsRule jenkins = new JenkinsRule(); /** * Makes sure that it returns sensible values. */ @Test public void testClockMonitor() throws Exception { DumbSlave s = jenkins.createOnlineSlave(); SlaveComputer c = s.getComputer(); if(c.isOffline()) fail("Slave failed to go online: "+c.getLog()); ClockDifference cd = ClockMonitor.DESCRIPTOR.monitor(c); long diff = cd.diff; assertTrue(diff < TimeUnit2.SECONDS.toMillis(5)); assertTrue(diff > TimeUnit2.SECONDS.toMillis(-5)); assertTrue(cd.abs() >= 0); assertTrue(cd.abs() < TimeUnit2.SECONDS.toMillis(5)); assertFalse(cd.isDangerous()); assertTrue("html output too short", cd.toHtml().length() > 0); } }
Use JenkinsRule instead of deprecated HudsonTestCase
Use JenkinsRule instead of deprecated HudsonTestCase
Java
mit
aduprat/jenkins,daniel-beck/jenkins,MichaelPranovich/jenkins_sc,kzantow/jenkins,FTG-003/jenkins,MichaelPranovich/jenkins_sc,DoctorQ/jenkins,rsandell/jenkins,mdonohue/jenkins,vijayto/jenkins,guoxu0514/jenkins,DanielWeber/jenkins,liorhson/jenkins,maikeffi/hudson,jpederzolli/jenkins-1,synopsys-arc-oss/jenkins,gorcz/jenkins,vijayto/jenkins,luoqii/jenkins,MarkEWaite/jenkins,lordofthejars/jenkins,6WIND/jenkins,elkingtonmcb/jenkins,AustinKwang/jenkins,aldaris/jenkins,dariver/jenkins,pjanouse/jenkins,lindzh/jenkins,chbiel/jenkins,jglick/jenkins,thomassuckow/jenkins,svanoort/jenkins,tangkun75/jenkins,dbroady1/jenkins,v1v/jenkins,escoem/jenkins,ChrisA89/jenkins,arunsingh/jenkins,SenolOzer/jenkins,andresrc/jenkins,hemantojhaa/jenkins,rashmikanta-1984/jenkins,jcarrothers-sap/jenkins,huybrechts/hudson,duzifang/my-jenkins,huybrechts/hudson,jcarrothers-sap/jenkins,DoctorQ/jenkins,jk47/jenkins,brunocvcunha/jenkins,amuniz/jenkins,yonglehou/jenkins,escoem/jenkins,oleg-nenashev/jenkins,bpzhang/jenkins,FarmGeek4Life/jenkins,olivergondza/jenkins,liorhson/jenkins,NehemiahMi/jenkins,ChrisA89/jenkins,vlajos/jenkins,jglick/jenkins,seanlin816/jenkins,scoheb/jenkins,Krasnyanskiy/jenkins,1and1/jenkins,daniel-beck/jenkins,liupugong/jenkins,luoqii/jenkins,recena/jenkins,patbos/jenkins,oleg-nenashev/jenkins,intelchen/jenkins,gorcz/jenkins,khmarbaise/jenkins,jzjzjzj/jenkins,samatdav/jenkins,jk47/jenkins,scoheb/jenkins,tastatur/jenkins,evernat/jenkins,stephenc/jenkins,albers/jenkins,daspilker/jenkins,paulmillar/jenkins,DoctorQ/jenkins,SebastienGllmt/jenkins,evernat/jenkins,gusreiber/jenkins,Ykus/jenkins,Jochen-A-Fuerbacher/jenkins,singh88/jenkins,wangyikai/jenkins,shahharsh/jenkins,ErikVerheul/jenkins,elkingtonmcb/jenkins,ChrisA89/jenkins,lordofthejars/jenkins,mcanthony/jenkins,chbiel/jenkins,iqstack/jenkins,amruthsoft9/Jenkis,Jimilian/jenkins,arcivanov/jenkins,synopsys-arc-oss/jenkins,MarkEWaite/jenkins,jglick/jenkins,pjanouse/jenkins,1and1/jenkins,jzjzjzj/jenkins,olivergondza/jenkins,MichaelPranovich/jenkins_sc,v1v/jenkins,SenolOzer/jenkins,jcsirot/jenkins,daspilker/jenkins,Wilfred/jenkins,dariver/jenkins,andresrc/jenkins,paulwellnerbou/jenkins,tfennelly/jenkins,damianszczepanik/jenkins,Vlatombe/jenkins,jglick/jenkins,khmarbaise/jenkins,kohsuke/hudson,FTG-003/jenkins,MichaelPranovich/jenkins_sc,ns163/jenkins,jcsirot/jenkins,github-api-test-org/jenkins,KostyaSha/jenkins,andresrc/jenkins,KostyaSha/jenkins,csimons/jenkins,amuniz/jenkins,samatdav/jenkins,my7seven/jenkins,seanlin816/jenkins,christ66/jenkins,mrooney/jenkins,arunsingh/jenkins,dariver/jenkins,azweb76/jenkins,jcarrothers-sap/jenkins,paulmillar/jenkins,nandan4/Jenkins,evernat/jenkins,jhoblitt/jenkins,varmenise/jenkins,jenkinsci/jenkins,batmat/jenkins,alvarolobato/jenkins,godfath3r/jenkins,gorcz/jenkins,duzifang/my-jenkins,Vlatombe/jenkins,dariver/jenkins,jpbriend/jenkins,MichaelPranovich/jenkins_sc,292388900/jenkins,aduprat/jenkins,thomassuckow/jenkins,MadsNielsen/jtemp,christ66/jenkins,ajshastri/jenkins,christ66/jenkins,gitaccountforprashant/gittest,vvv444/jenkins,rsandell/jenkins,aheritier/jenkins,godfath3r/jenkins,ErikVerheul/jenkins,viqueen/jenkins,petermarcoen/jenkins,bpzhang/jenkins,jglick/jenkins,v1v/jenkins,iqstack/jenkins,tastatur/jenkins,gusreiber/jenkins,vjuranek/jenkins,Ykus/jenkins,csimons/jenkins,ydubreuil/jenkins,rsandell/jenkins,ikedam/jenkins,kzantow/jenkins,CodeShane/jenkins,noikiy/jenkins,NehemiahMi/jenkins,amuniz/jenkins,iqstack/jenkins,vijayto/jenkins,maikeffi/hudson,wuwen5/jenkins,mcanthony/jenkins,patbos/jenkins,KostyaSha/jenkins,wangyikai/j
enkins,ChrisA89/jenkins,morficus/jenkins,aduprat/jenkins,petermarcoen/jenkins,bpzhang/jenkins,AustinKwang/jenkins,msrb/jenkins,ErikVerheul/jenkins,verbitan/jenkins,alvarolobato/jenkins,Vlatombe/jenkins,aquarellian/jenkins,maikeffi/hudson,amruthsoft9/Jenkis,intelchen/jenkins,DoctorQ/jenkins,h4ck3rm1k3/jenkins,hemantojhaa/jenkins,jzjzjzj/jenkins,alvarolobato/jenkins,SebastienGllmt/jenkins,wuwen5/jenkins,gusreiber/jenkins,sathiya-mit/jenkins,FTG-003/jenkins,akshayabd/jenkins,mrooney/jenkins,aquarellian/jenkins,h4ck3rm1k3/jenkins,rlugojr/jenkins,liupugong/jenkins,ns163/jenkins,6WIND/jenkins,jcsirot/jenkins,ajshastri/jenkins,morficus/jenkins,Wilfred/jenkins,brunocvcunha/jenkins,keyurpatankar/hudson,shahharsh/jenkins,daspilker/jenkins,gusreiber/jenkins,stephenc/jenkins,luoqii/jenkins,dennisjlee/jenkins,mcanthony/jenkins,MadsNielsen/jtemp,amuniz/jenkins,shahharsh/jenkins,luoqii/jenkins,FTG-003/jenkins,kohsuke/hudson,gusreiber/jenkins,godfath3r/jenkins,escoem/jenkins,guoxu0514/jenkins,mdonohue/jenkins,AustinKwang/jenkins,alvarolobato/jenkins,Wilfred/jenkins,arcivanov/jenkins,mcanthony/jenkins,evernat/jenkins,pselle/jenkins,wangyikai/jenkins,azweb76/jenkins,mcanthony/jenkins,deadmoose/jenkins,ndeloof/jenkins,kohsuke/hudson,jenkinsci/jenkins,olivergondza/jenkins,jpbriend/jenkins,evernat/jenkins,khmarbaise/jenkins,Ykus/jenkins,protazy/jenkins,ndeloof/jenkins,iqstack/jenkins,rlugojr/jenkins,iqstack/jenkins,Vlatombe/jenkins,Wilfred/jenkins,arcivanov/jenkins,synopsys-arc-oss/jenkins,FarmGeek4Life/jenkins,lindzh/jenkins,dennisjlee/jenkins,tfennelly/jenkins,godfath3r/jenkins,singh88/jenkins,SenolOzer/jenkins,lordofthejars/jenkins,arunsingh/jenkins,Wilfred/jenkins,Jimilian/jenkins,bkmeneguello/jenkins,escoem/jenkins,samatdav/jenkins,petermarcoen/jenkins,svanoort/jenkins,KostyaSha/jenkins,oleg-nenashev/jenkins,dennisjlee/jenkins,arunsingh/jenkins,liorhson/jenkins,mdonohue/jenkins,vjuranek/jenkins,vijayto/jenkins,my7seven/jenkins,svanoort/jenkins,aquarellian/jenkins,maikeffi/hudson,KostyaSha/jenkins,SebastienGllmt/jenkins,olivergondza/jenkins,paulmillar/jenkins,goldchang/jenkins,ErikVerheul/jenkins,andresrc/jenkins,rashmikanta-1984/jenkins,FarmGeek4Life/jenkins,rlugojr/jenkins,msrb/jenkins,vijayto/jenkins,huybrechts/hudson,albers/jenkins,viqueen/jenkins,chbiel/jenkins,fbelzunc/jenkins,recena/jenkins,thomassuckow/jenkins,thomassuckow/jenkins,daniel-beck/jenkins,noikiy/jenkins,ns163/jenkins,soenter/jenkins,wuwen5/jenkins,jzjzjzj/jenkins,liupugong/jenkins,rashmikanta-1984/jenkins,NehemiahMi/jenkins,6WIND/jenkins,aheritier/jenkins,jglick/jenkins,deadmoose/jenkins,paulmillar/jenkins,ydubreuil/jenkins,vlajos/jenkins,6WIND/jenkins,seanlin816/jenkins,elkingtonmcb/jenkins,dbroady1/jenkins,noikiy/jenkins,Ykus/jenkins,pselle/jenkins,Jimilian/jenkins,goldchang/jenkins,vjuranek/jenkins,liupugong/jenkins,fbelzunc/jenkins,amruthsoft9/Jenkis,aldaris/jenkins,samatdav/jenkins,aquarellian/jenkins,hemantojhaa/jenkins,hplatou/jenkins,FarmGeek4Life/jenkins,vlajos/jenkins,dbroady1/jenkins,azweb76/jenkins,verbitan/jenkins,jk47/jenkins,stephenc/jenkins,msrb/jenkins,akshayabd/jenkins,varmenise/jenkins,chbiel/jenkins,viqueen/jenkins,Jochen-A-Fuerbacher/jenkins,stephenc/jenkins,samatdav/jenkins,v1v/jenkins,mattclark/jenkins,elkingtonmcb/jenkins,kzantow/jenkins,pjanouse/jenkins,ydubreuil/jenkins,paulwellnerbou/jenkins,mcanthony/jenkins,ErikVerheul/jenkins,luoqii/jenkins,scoheb/jenkins,amruthsoft9/Jenkis,daspilker/jenkins,gitaccountforprashant/gittest,github-api-test-org/jenkins,liorhson/jenkins,tastatur/jenkins,wuwen5/jenkins,morficus/je
nkins,shahharsh/jenkins,paulmillar/jenkins,akshayabd/jenkins,gorcz/jenkins,sathiya-mit/jenkins,DanielWeber/jenkins,thomassuckow/jenkins,jpbriend/jenkins,liupugong/jenkins,luoqii/jenkins,lilyJi/jenkins,svanoort/jenkins,tfennelly/jenkins,varmenise/jenkins,rlugojr/jenkins,intelchen/jenkins,daspilker/jenkins,scoheb/jenkins,rsandell/jenkins,vijayto/jenkins,jpederzolli/jenkins-1,hemantojhaa/jenkins,goldchang/jenkins,1and1/jenkins,NehemiahMi/jenkins,292388900/jenkins,vvv444/jenkins,duzifang/my-jenkins,daniel-beck/jenkins,gitaccountforprashant/gittest,maikeffi/hudson,ndeloof/jenkins,protazy/jenkins,godfath3r/jenkins,rsandell/jenkins,hashar/jenkins,tangkun75/jenkins,soenter/jenkins,sathiya-mit/jenkins,hashar/jenkins,deadmoose/jenkins,rsandell/jenkins,jpederzolli/jenkins-1,AustinKwang/jenkins,kzantow/jenkins,morficus/jenkins,chbiel/jenkins,gusreiber/jenkins,tfennelly/jenkins,elkingtonmcb/jenkins,godfath3r/jenkins,batmat/jenkins,synopsys-arc-oss/jenkins,ajshastri/jenkins,tastatur/jenkins,ydubreuil/jenkins,Jimilian/jenkins,DoctorQ/jenkins,jenkinsci/jenkins,aldaris/jenkins,liorhson/jenkins,h4ck3rm1k3/jenkins,vvv444/jenkins,FarmGeek4Life/jenkins,ajshastri/jenkins,keyurpatankar/hudson,dennisjlee/jenkins,andresrc/jenkins,everyonce/jenkins,liupugong/jenkins,dbroady1/jenkins,ydubreuil/jenkins,jenkinsci/jenkins,arcivanov/jenkins,lindzh/jenkins,ydubreuil/jenkins,jenkinsci/jenkins,alvarolobato/jenkins,MadsNielsen/jtemp,wangyikai/jenkins,arunsingh/jenkins,Vlatombe/jenkins,Wilfred/jenkins,paulwellnerbou/jenkins,oleg-nenashev/jenkins,aldaris/jenkins,nandan4/Jenkins,keyurpatankar/hudson,amuniz/jenkins,SebastienGllmt/jenkins,DanielWeber/jenkins,sathiya-mit/jenkins,huybrechts/hudson,singh88/jenkins,hashar/jenkins,jhoblitt/jenkins,recena/jenkins,patbos/jenkins,lilyJi/jenkins,iqstack/jenkins,ChrisA89/jenkins,lilyJi/jenkins,protazy/jenkins,albers/jenkins,christ66/jenkins,wuwen5/jenkins,nandan4/Jenkins,tastatur/jenkins,evernat/jenkins,aheritier/jenkins,protazy/jenkins,albers/jenkins,sathiya-mit/jenkins,msrb/jenkins,ikedam/jenkins,intelchen/jenkins,wuwen5/jenkins,daniel-beck/jenkins,292388900/jenkins,albers/jenkins,verbitan/jenkins,scoheb/jenkins,goldchang/jenkins,everyonce/jenkins,ajshastri/jenkins,vlajos/jenkins,bkmeneguello/jenkins,jk47/jenkins,vvv444/jenkins,lordofthejars/jenkins,huybrechts/hudson,amruthsoft9/Jenkis,ErikVerheul/jenkins,duzifang/my-jenkins,scoheb/jenkins,olivergondza/jenkins,arcivanov/jenkins,arcivanov/jenkins,kohsuke/hudson,jk47/jenkins,Ykus/jenkins,dariver/jenkins,tastatur/jenkins,jpbriend/jenkins,csimons/jenkins,christ66/jenkins,albers/jenkins,singh88/jenkins,goldchang/jenkins,gorcz/jenkins,jhoblitt/jenkins,synopsys-arc-oss/jenkins,recena/jenkins,CodeShane/jenkins,azweb76/jenkins,vvv444/jenkins,mrooney/jenkins,thomassuckow/jenkins,keyurpatankar/hudson,recena/jenkins,hashar/jenkins,Krasnyanskiy/jenkins,wangyikai/jenkins,goldchang/jenkins,christ66/jenkins,mcanthony/jenkins,gorcz/jenkins,shahharsh/jenkins,hemantojhaa/jenkins,tfennelly/jenkins,seanlin816/jenkins,soenter/jenkins,msrb/jenkins,h4ck3rm1k3/jenkins,FTG-003/jenkins,everyonce/jenkins,DanielWeber/jenkins,CodeShane/jenkins,viqueen/jenkins,bkmeneguello/jenkins,AustinKwang/jenkins,stephenc/jenkins,stephenc/jenkins,nandan4/Jenkins,h4ck3rm1k3/jenkins,MichaelPranovich/jenkins_sc,ChrisA89/jenkins,jcarrothers-sap/jenkins,mdonohue/jenkins,hplatou/jenkins,292388900/jenkins,jpederzolli/jenkins-1,csimons/jenkins,akshayabd/jenkins,arunsingh/jenkins,shahharsh/jenkins,vjuranek/jenkins,morficus/jenkins,hemantojhaa/jenkins,olivergondza/jenkins,MadsNielsen/jtemp,
MadsNielsen/jtemp,lindzh/jenkins,bpzhang/jenkins,brunocvcunha/jenkins,CodeShane/jenkins,yonglehou/jenkins,ajshastri/jenkins,svanoort/jenkins,intelchen/jenkins,aheritier/jenkins,evernat/jenkins,khmarbaise/jenkins,lindzh/jenkins,escoem/jenkins,azweb76/jenkins,gusreiber/jenkins,escoem/jenkins,CodeShane/jenkins,yonglehou/jenkins,brunocvcunha/jenkins,aduprat/jenkins,ikedam/jenkins,kohsuke/hudson,stephenc/jenkins,aduprat/jenkins,CodeShane/jenkins,hplatou/jenkins,keyurpatankar/hudson,nandan4/Jenkins,mattclark/jenkins,Krasnyanskiy/jenkins,dbroady1/jenkins,my7seven/jenkins,Krasnyanskiy/jenkins,batmat/jenkins,azweb76/jenkins,Jochen-A-Fuerbacher/jenkins,pselle/jenkins,mattclark/jenkins,noikiy/jenkins,paulmillar/jenkins,vjuranek/jenkins,bpzhang/jenkins,amuniz/jenkins,protazy/jenkins,damianszczepanik/jenkins,jcsirot/jenkins,jk47/jenkins,SenolOzer/jenkins,pselle/jenkins,github-api-test-org/jenkins,guoxu0514/jenkins,DoctorQ/jenkins,mdonohue/jenkins,github-api-test-org/jenkins,SenolOzer/jenkins,pselle/jenkins,jhoblitt/jenkins,github-api-test-org/jenkins,guoxu0514/jenkins,amruthsoft9/Jenkis,damianszczepanik/jenkins,vijayto/jenkins,verbitan/jenkins,batmat/jenkins,yonglehou/jenkins,khmarbaise/jenkins,fbelzunc/jenkins,pjanouse/jenkins,FarmGeek4Life/jenkins,tastatur/jenkins,lordofthejars/jenkins,intelchen/jenkins,fbelzunc/jenkins,dariver/jenkins,wuwen5/jenkins,aldaris/jenkins,SenolOzer/jenkins,aquarellian/jenkins,daniel-beck/jenkins,godfath3r/jenkins,daspilker/jenkins,Jimilian/jenkins,mrooney/jenkins,viqueen/jenkins,varmenise/jenkins,MarkEWaite/jenkins,daspilker/jenkins,akshayabd/jenkins,noikiy/jenkins,synopsys-arc-oss/jenkins,SebastienGllmt/jenkins,jpbriend/jenkins,andresrc/jenkins,dariver/jenkins,MarkEWaite/jenkins,brunocvcunha/jenkins,NehemiahMi/jenkins,lilyJi/jenkins,hemantojhaa/jenkins,MarkEWaite/jenkins,jpbriend/jenkins,jglick/jenkins,goldchang/jenkins,Krasnyanskiy/jenkins,seanlin816/jenkins,patbos/jenkins,vlajos/jenkins,jpederzolli/jenkins-1,rashmikanta-1984/jenkins,vjuranek/jenkins,kzantow/jenkins,jzjzjzj/jenkins,292388900/jenkins,ndeloof/jenkins,akshayabd/jenkins,jk47/jenkins,hplatou/jenkins,pjanouse/jenkins,MichaelPranovich/jenkins_sc,mdonohue/jenkins,vlajos/jenkins,everyonce/jenkins,keyurpatankar/hudson,deadmoose/jenkins,lordofthejars/jenkins,SebastienGllmt/jenkins,tfennelly/jenkins,deadmoose/jenkins,jenkinsci/jenkins,yonglehou/jenkins,albers/jenkins,patbos/jenkins,alvarolobato/jenkins,Ykus/jenkins,h4ck3rm1k3/jenkins,AustinKwang/jenkins,iqstack/jenkins,ns163/jenkins,jhoblitt/jenkins,dbroady1/jenkins,recena/jenkins,MarkEWaite/jenkins,1and1/jenkins,elkingtonmcb/jenkins,kohsuke/hudson,deadmoose/jenkins,patbos/jenkins,vlajos/jenkins,csimons/jenkins,gitaccountforprashant/gittest,duzifang/my-jenkins,292388900/jenkins,synopsys-arc-oss/jenkins,seanlin816/jenkins,duzifang/my-jenkins,aheritier/jenkins,aheritier/jenkins,Vlatombe/jenkins,aquarellian/jenkins,msrb/jenkins,Wilfred/jenkins,paulwellnerbou/jenkins,mrooney/jenkins,KostyaSha/jenkins,paulwellnerbou/jenkins,Jochen-A-Fuerbacher/jenkins,6WIND/jenkins,csimons/jenkins,pjanouse/jenkins,azweb76/jenkins,jcarrothers-sap/jenkins,bpzhang/jenkins,intelchen/jenkins,MadsNielsen/jtemp,paulwellnerbou/jenkins,daniel-beck/jenkins,jhoblitt/jenkins,guoxu0514/jenkins,bkmeneguello/jenkins,mrooney/jenkins,mattclark/jenkins,KostyaSha/jenkins,Jochen-A-Fuerbacher/jenkins,jpbriend/jenkins,jcsirot/jenkins,soenter/jenkins,rashmikanta-1984/jenkins,dbroady1/jenkins,MarkEWaite/jenkins,Krasnyanskiy/jenkins,fbelzunc/jenkins,ns163/jenkins,pselle/jenkins,damianszczepanik/jenkins,hashar/jen
kins,paulmillar/jenkins,my7seven/jenkins,pjanouse/jenkins,mattclark/jenkins,shahharsh/jenkins,keyurpatankar/hudson,SebastienGllmt/jenkins,DanielWeber/jenkins,csimons/jenkins,aldaris/jenkins,protazy/jenkins,mattclark/jenkins,guoxu0514/jenkins,damianszczepanik/jenkins,everyonce/jenkins,hplatou/jenkins,wangyikai/jenkins,liorhson/jenkins,damianszczepanik/jenkins,morficus/jenkins,ikedam/jenkins,batmat/jenkins,msrb/jenkins,singh88/jenkins,paulwellnerbou/jenkins,DoctorQ/jenkins,ns163/jenkins,NehemiahMi/jenkins,samatdav/jenkins,github-api-test-org/jenkins,ErikVerheul/jenkins,yonglehou/jenkins,recena/jenkins,Ykus/jenkins,jzjzjzj/jenkins,singh88/jenkins,damianszczepanik/jenkins,aldaris/jenkins,verbitan/jenkins,Krasnyanskiy/jenkins,escoem/jenkins,rlugojr/jenkins,aduprat/jenkins,lilyJi/jenkins,rashmikanta-1984/jenkins,patbos/jenkins,maikeffi/hudson,v1v/jenkins,hplatou/jenkins,batmat/jenkins,elkingtonmcb/jenkins,jenkinsci/jenkins,daniel-beck/jenkins,Jochen-A-Fuerbacher/jenkins,rsandell/jenkins,arcivanov/jenkins,DanielWeber/jenkins,Jimilian/jenkins,ChrisA89/jenkins,github-api-test-org/jenkins,jzjzjzj/jenkins,everyonce/jenkins,jcarrothers-sap/jenkins,soenter/jenkins,Jimilian/jenkins,bkmeneguello/jenkins,tangkun75/jenkins,h4ck3rm1k3/jenkins,liupugong/jenkins,FTG-003/jenkins,mattclark/jenkins,rsandell/jenkins,AustinKwang/jenkins,vvv444/jenkins,tfennelly/jenkins,amruthsoft9/Jenkis,gitaccountforprashant/gittest,sathiya-mit/jenkins,ydubreuil/jenkins,aheritier/jenkins,mrooney/jenkins,aduprat/jenkins,tangkun75/jenkins,v1v/jenkins,my7seven/jenkins,tangkun75/jenkins,292388900/jenkins,akshayabd/jenkins,andresrc/jenkins,noikiy/jenkins,MadsNielsen/jtemp,seanlin816/jenkins,varmenise/jenkins,6WIND/jenkins,CodeShane/jenkins,pselle/jenkins,arunsingh/jenkins,petermarcoen/jenkins,my7seven/jenkins,kohsuke/hudson,1and1/jenkins,gitaccountforprashant/gittest,ndeloof/jenkins,fbelzunc/jenkins,lindzh/jenkins,kzantow/jenkins,1and1/jenkins,shahharsh/jenkins,rlugojr/jenkins,goldchang/jenkins,oleg-nenashev/jenkins,gorcz/jenkins,aquarellian/jenkins,hashar/jenkins,noikiy/jenkins,vjuranek/jenkins,ndeloof/jenkins,ikedam/jenkins,kzantow/jenkins,thomassuckow/jenkins,FTG-003/jenkins,huybrechts/hudson,bkmeneguello/jenkins,khmarbaise/jenkins,viqueen/jenkins,dennisjlee/jenkins,luoqii/jenkins,jenkinsci/jenkins,viqueen/jenkins,my7seven/jenkins,christ66/jenkins,maikeffi/hudson,verbitan/jenkins,hplatou/jenkins,nandan4/Jenkins,lordofthejars/jenkins,huybrechts/hudson,jcsirot/jenkins,rlugojr/jenkins,Vlatombe/jenkins,ikedam/jenkins,svanoort/jenkins,deadmoose/jenkins,SenolOzer/jenkins,singh88/jenkins,jcarrothers-sap/jenkins,nandan4/Jenkins,amuniz/jenkins,kohsuke/hudson,fbelzunc/jenkins,ikedam/jenkins,lindzh/jenkins,wangyikai/jenkins,ndeloof/jenkins,varmenise/jenkins,tangkun75/jenkins,lilyJi/jenkins,tangkun75/jenkins,github-api-test-org/jenkins,gitaccountforprashant/gittest,ajshastri/jenkins,maikeffi/hudson,DanielWeber/jenkins,Jochen-A-Fuerbacher/jenkins,everyonce/jenkins,liorhson/jenkins,DoctorQ/jenkins,1and1/jenkins,6WIND/jenkins,petermarcoen/jenkins,KostyaSha/jenkins,verbitan/jenkins,ikedam/jenkins,chbiel/jenkins,dennisjlee/jenkins,jzjzjzj/jenkins,jcsirot/jenkins,yonglehou/jenkins,soenter/jenkins,v1v/jenkins,varmenise/jenkins,sathiya-mit/jenkins,FarmGeek4Life/jenkins,protazy/jenkins,jpederzolli/jenkins-1,oleg-nenashev/jenkins,jpederzolli/jenkins-1,morficus/jenkins,soenter/jenkins,khmarbaise/jenkins,dennisjlee/jenkins,oleg-nenashev/jenkins,hashar/jenkins,alvarolobato/jenkins,duzifang/my-jenkins,brunocvcunha/jenkins,bkmeneguello/jenkins,guoxu0514/jenki
ns,vvv444/jenkins,petermarcoen/jenkins,samatdav/jenkins,gorcz/jenkins,MarkEWaite/jenkins,brunocvcunha/jenkins,rashmikanta-1984/jenkins,damianszczepanik/jenkins,keyurpatankar/hudson,batmat/jenkins,svanoort/jenkins,NehemiahMi/jenkins,ns163/jenkins,jcarrothers-sap/jenkins,petermarcoen/jenkins,olivergondza/jenkins,mdonohue/jenkins,chbiel/jenkins,bpzhang/jenkins,jhoblitt/jenkins,lilyJi/jenkins,scoheb/jenkins
java
## Code Before: package hudson.node_monitors; import org.jvnet.hudson.test.HudsonTestCase; import hudson.slaves.DumbSlave; import hudson.slaves.SlaveComputer; import hudson.util.ClockDifference; import hudson.util.TimeUnit2; /** * @author Richard Mortimer */ public class ClockMonitorDescriptorTest extends HudsonTestCase { /** * Makes sure that it returns sensible values. */ public void testClockMonitor() throws Exception { DumbSlave s = createSlave(); SlaveComputer c = s.getComputer(); c.connect(false).get(); // wait until it's connected if(c.isOffline()) fail("Slave failed to go online: "+c.getLog()); ClockDifference cd = ClockMonitor.DESCRIPTOR.monitor(c); long diff = cd.diff; assertTrue(diff < TimeUnit2.SECONDS.toMillis(5)); assertTrue(diff > TimeUnit2.SECONDS.toMillis(-5)); assertTrue(cd.abs() >= 0); assertTrue(cd.abs() < TimeUnit2.SECONDS.toMillis(5)); assertFalse(cd.isDangerous()); assertTrue("html output too short", cd.toHtml().length() > 0); } } ## Instruction: Use JenkinsRule instead of deprecated HudsonTestCase ## Code After: package hudson.node_monitors; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import hudson.slaves.DumbSlave; import hudson.slaves.SlaveComputer; import hudson.util.ClockDifference; import hudson.util.TimeUnit2; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; /** * @author Richard Mortimer */ public class ClockMonitorDescriptorTest { @Rule public JenkinsRule jenkins = new JenkinsRule(); /** * Makes sure that it returns sensible values. */ @Test public void testClockMonitor() throws Exception { DumbSlave s = jenkins.createOnlineSlave(); SlaveComputer c = s.getComputer(); if(c.isOffline()) fail("Slave failed to go online: "+c.getLog()); ClockDifference cd = ClockMonitor.DESCRIPTOR.monitor(c); long diff = cd.diff; assertTrue(diff < TimeUnit2.SECONDS.toMillis(5)); assertTrue(diff > TimeUnit2.SECONDS.toMillis(-5)); assertTrue(cd.abs() >= 0); assertTrue(cd.abs() < TimeUnit2.SECONDS.toMillis(5)); assertFalse(cd.isDangerous()); assertTrue("html output too short", cd.toHtml().length() > 0); } }
... package hudson.node_monitors; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import hudson.slaves.DumbSlave; import hudson.slaves.SlaveComputer; import hudson.util.ClockDifference; import hudson.util.TimeUnit2; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; /** * @author Richard Mortimer */ public class ClockMonitorDescriptorTest { @Rule public JenkinsRule jenkins = new JenkinsRule(); /** * Makes sure that it returns sensible values. */ @Test public void testClockMonitor() throws Exception { DumbSlave s = jenkins.createOnlineSlave(); SlaveComputer c = s.getComputer(); if(c.isOffline()) fail("Slave failed to go online: "+c.getLog()); ...
9458ec6fc75d9c3ad0bacec82b8207a5943e9c16
src/main/java/jannotate/domain/Annotation.java
src/main/java/jannotate/domain/Annotation.java
package jannotate.domain; import javax.persistence.Entity; import javax.persistence.Id; @Entity public class Annotation { @Id int id; String name; String api; String use; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getApi() { return api; } public void setApi(String api) { this.api = api; } public String getUse() { return use; } public void setUse(String use) { this.use = use; } }
package jannotate.domain; import java.lang.annotation.RetentionPolicy; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.Id; @Entity public class Annotation { @Id int id; String api; String name; RetentionPolicy retention; String use; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getApi() { return api; } public void setApi(String api) { this.api = api; } @Enumerated(EnumType.ORDINAL) public RetentionPolicy getRetention() { return retention; } public void setRetention(RetentionPolicy retention) { this.retention = retention; } public String getUse() { return use; } public void setUse(String use) { this.use = use; } }
Add field for retention policy.
Add field for retention policy.
Java
bsd-2-clause
IanDarwin/jannotate
java
## Code Before: package jannotate.domain; import javax.persistence.Entity; import javax.persistence.Id; @Entity public class Annotation { @Id int id; String name; String api; String use; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getApi() { return api; } public void setApi(String api) { this.api = api; } public String getUse() { return use; } public void setUse(String use) { this.use = use; } } ## Instruction: Add field for retention policy. ## Code After: package jannotate.domain; import java.lang.annotation.RetentionPolicy; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.Id; @Entity public class Annotation { @Id int id; String api; String name; RetentionPolicy retention; String use; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getApi() { return api; } public void setApi(String api) { this.api = api; } @Enumerated(EnumType.ORDINAL) public RetentionPolicy getRetention() { return retention; } public void setRetention(RetentionPolicy retention) { this.retention = retention; } public String getUse() { return use; } public void setUse(String use) { this.use = use; } }
# ... existing code ... package jannotate.domain; import java.lang.annotation.RetentionPolicy; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.Id; @Entity # ... modified code ... public class Annotation { @Id int id; String api; String name; RetentionPolicy retention; String use; public int getId() { ... public void setApi(String api) { this.api = api; } @Enumerated(EnumType.ORDINAL) public RetentionPolicy getRetention() { return retention; } public void setRetention(RetentionPolicy retention) { this.retention = retention; } public String getUse() { return use; } # ... rest of the code ...
f0da6d49024cf824f161ecbecf112916dd859a0b
src/main/java/semantic/SemanticHelper.java
src/main/java/semantic/SemanticHelper.java
package semantic; import error.CompilerError; import parse.Loc; import types.Type; public interface SemanticHelper { static CompilerError typeMismatch(Loc loc, Type found, Type... expected) { final StringBuilder builder = new StringBuilder(); final int n = expected.length; if (n > 0) { builder.append(expected[0]); if (n > 1) { for (int i = 1; i < n - 2; i++) builder.append(", ").append(expected[i]); builder.append(" or ").append(expected[n - 1]); } } return new CompilerError(loc, "type mismatch: found %s but expected %s", found, builder); } static CompilerError undefined(Loc loc, String category, String name) { return new CompilerError(loc, "undefined %s '%s'", category, name); } static CompilerError notAFunction(Loc loc, String name) { return new CompilerError(loc, "'%s' is not a function", name); } static CompilerError tooFewArguments(Loc loc, String name) { return new CompilerError(loc, "too few arguments in call to '%s'", name); } static CompilerError tooMuchArguments(Loc loc, String name) { return new CompilerError(loc, "too much arguments in call to '%s'", name); } }
package semantic; import error.CompilerError; import parse.Loc; import types.Type; public interface SemanticHelper { static CompilerError error(String message) { return new CompilerError(message); } static CompilerError error(String format, Object... args) { return new CompilerError(format, args); } static CompilerError error(Loc loc, String format, Object... args) { return new CompilerError(loc, format, args); } static CompilerError typeMismatch(Loc loc, Type found, Type... expected) { final StringBuilder builder = new StringBuilder(); final int n = expected.length; if (n > 0) { builder.append(expected[0]); if (n > 1) { for (int i = 1; i < n - 2; i++) builder.append(", ").append(expected[i]); builder.append(" or ").append(expected[n - 1]); } } return error(loc, "type mismatch: found %s but expected %s", found, builder); } static CompilerError undefined(Loc loc, String category, String name) { return new CompilerError(loc, "undefined %s '%s'", category, name); } static CompilerError notAFunction(Loc loc, String name) { return error(loc, "'%s' is not a function", name); } static CompilerError tooFewArguments(Loc loc, String name) { return error(loc, "too few arguments in call to '%s'", name); } static CompilerError tooMuchArguments(Loc loc, String name) { return error(loc, "too much arguments in call to '%s'", name); } }
Add more helper methods for reporting errors
Add more helper methods for reporting errors
Java
mit
romildo/eplan,romildo/eplan
java
## Code Before: package semantic; import error.CompilerError; import parse.Loc; import types.Type; public interface SemanticHelper { static CompilerError typeMismatch(Loc loc, Type found, Type... expected) { final StringBuilder builder = new StringBuilder(); final int n = expected.length; if (n > 0) { builder.append(expected[0]); if (n > 1) { for (int i = 1; i < n - 2; i++) builder.append(", ").append(expected[i]); builder.append(" or ").append(expected[n - 1]); } } return new CompilerError(loc, "type mismatch: found %s but expected %s", found, builder); } static CompilerError undefined(Loc loc, String category, String name) { return new CompilerError(loc, "undefined %s '%s'", category, name); } static CompilerError notAFunction(Loc loc, String name) { return new CompilerError(loc, "'%s' is not a function", name); } static CompilerError tooFewArguments(Loc loc, String name) { return new CompilerError(loc, "too few arguments in call to '%s'", name); } static CompilerError tooMuchArguments(Loc loc, String name) { return new CompilerError(loc, "too much arguments in call to '%s'", name); } } ## Instruction: Add more helper methods for reporting errors ## Code After: package semantic; import error.CompilerError; import parse.Loc; import types.Type; public interface SemanticHelper { static CompilerError error(String message) { return new CompilerError(message); } static CompilerError error(String format, Object... args) { return new CompilerError(format, args); } static CompilerError error(Loc loc, String format, Object... args) { return new CompilerError(loc, format, args); } static CompilerError typeMismatch(Loc loc, Type found, Type... expected) { final StringBuilder builder = new StringBuilder(); final int n = expected.length; if (n > 0) { builder.append(expected[0]); if (n > 1) { for (int i = 1; i < n - 2; i++) builder.append(", ").append(expected[i]); builder.append(" or ").append(expected[n - 1]); } } return error(loc, "type mismatch: found %s but expected %s", found, builder); } static CompilerError undefined(Loc loc, String category, String name) { return new CompilerError(loc, "undefined %s '%s'", category, name); } static CompilerError notAFunction(Loc loc, String name) { return error(loc, "'%s' is not a function", name); } static CompilerError tooFewArguments(Loc loc, String name) { return error(loc, "too few arguments in call to '%s'", name); } static CompilerError tooMuchArguments(Loc loc, String name) { return error(loc, "too much arguments in call to '%s'", name); } }
// ... existing code ... import types.Type; public interface SemanticHelper { static CompilerError error(String message) { return new CompilerError(message); } static CompilerError error(String format, Object... args) { return new CompilerError(format, args); } static CompilerError error(Loc loc, String format, Object... args) { return new CompilerError(loc, format, args); } static CompilerError typeMismatch(Loc loc, Type found, Type... expected) { final StringBuilder builder = new StringBuilder(); // ... modified code ... builder.append(" or ").append(expected[n - 1]); } } return error(loc, "type mismatch: found %s but expected %s", found, builder); } static CompilerError undefined(Loc loc, String category, String name) { ... } static CompilerError notAFunction(Loc loc, String name) { return error(loc, "'%s' is not a function", name); } static CompilerError tooFewArguments(Loc loc, String name) { return error(loc, "too few arguments in call to '%s'", name); } static CompilerError tooMuchArguments(Loc loc, String name) { return error(loc, "too much arguments in call to '%s'", name); } } // ... rest of the code ...
d379bc510cac88434e9eb26a630e819870608b04
setup.py
setup.py
from setuptools import setup, find_packages setup( name='matmeta', version='0.1.0', url='https://github.com/jasonthiese/CommonMetadata', description='Common support for meta-data', author='Jason Thiese', author_email="[email protected]", license="Apache v2", packages=find_packages(), install_requires=[ 'pypif' ] )
from setuptools import setup, find_packages setup( name='matmeta', version='0.1.0', url='https://github.com/MaterialsDataInfrastructureConsortium/CommonMetadata', description='Common support for materials metadata', author='Jason Thiese', author_email="[email protected]", license="Apache v2", packages=find_packages(), install_requires=[ 'pypif' ] )
Update the URL while we're at it
Update the URL while we're at it
Python
apache-2.0
MaterialsDataInfrastructureConsortium/CommonMetadata
python
## Code Before: from setuptools import setup, find_packages setup( name='matmeta', version='0.1.0', url='https://github.com/jasonthiese/CommonMetadata', description='Common support for meta-data', author='Jason Thiese', author_email="[email protected]", license="Apache v2", packages=find_packages(), install_requires=[ 'pypif' ] ) ## Instruction: Update the URL while we're at it ## Code After: from setuptools import setup, find_packages setup( name='matmeta', version='0.1.0', url='https://github.com/MaterialsDataInfrastructureConsortium/CommonMetadata', description='Common support for materials metadata', author='Jason Thiese', author_email="[email protected]", license="Apache v2", packages=find_packages(), install_requires=[ 'pypif' ] )
# ... existing code ... setup( name='matmeta', version='0.1.0', url='https://github.com/MaterialsDataInfrastructureConsortium/CommonMetadata', description='Common support for materials metadata', author='Jason Thiese', author_email="[email protected]", license="Apache v2", # ... rest of the code ...
6384e6a23f73eddf1099e01ed0d8c067141651a5
tcelery/__init__.py
tcelery/__init__.py
from __future__ import absolute_import import celery from tornado import ioloop from .connection import ConnectionPool from .producer import NonBlockingTaskProducer from .result import AsyncResult VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) + '-dev' def setup_nonblocking_producer(celery_app=None, io_loop=None, on_ready=None, result_cls=AsyncResult, limit=1): celery_app = celery_app or celery.current_app io_loop = io_loop or ioloop.IOLoop.instance() NonBlockingTaskProducer.app = celery_app NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop) NonBlockingTaskProducer.result_cls = result_cls if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'): celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer def connect(): broker_url = celery_app.connection().as_uri(include_password=True) options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {}) NonBlockingTaskProducer.conn_pool.connect(broker_url, options=options, callback=on_ready) io_loop.add_callback(connect)
from __future__ import absolute_import import celery from tornado import ioloop from .connection import ConnectionPool from .producer import NonBlockingTaskProducer from .result import AsyncResult VERSION = (0, 3, 4) __version__ = '.'.join(map(str, VERSION)) def setup_nonblocking_producer(celery_app=None, io_loop=None, on_ready=None, result_cls=AsyncResult, limit=1): celery_app = celery_app or celery.current_app io_loop = io_loop or ioloop.IOLoop.instance() NonBlockingTaskProducer.app = celery_app NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop) NonBlockingTaskProducer.result_cls = result_cls if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'): celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer def connect(): broker_url = celery_app.connection().as_uri(include_password=True) options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {}) NonBlockingTaskProducer.conn_pool.connect(broker_url, options=options, callback=on_ready) io_loop.add_callback(connect)
Set release version to 0.3.4
Set release version to 0.3.4
Python
bsd-3-clause
shnjp/tornado-celery,qudos-com/tornado-celery,mher/tornado-celery,sangwonl/tornado-celery
python
## Code Before: from __future__ import absolute_import import celery from tornado import ioloop from .connection import ConnectionPool from .producer import NonBlockingTaskProducer from .result import AsyncResult VERSION = (0, 4, 0) __version__ = '.'.join(map(str, VERSION)) + '-dev' def setup_nonblocking_producer(celery_app=None, io_loop=None, on_ready=None, result_cls=AsyncResult, limit=1): celery_app = celery_app or celery.current_app io_loop = io_loop or ioloop.IOLoop.instance() NonBlockingTaskProducer.app = celery_app NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop) NonBlockingTaskProducer.result_cls = result_cls if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'): celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer def connect(): broker_url = celery_app.connection().as_uri(include_password=True) options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {}) NonBlockingTaskProducer.conn_pool.connect(broker_url, options=options, callback=on_ready) io_loop.add_callback(connect) ## Instruction: Set release version to 0.3.4 ## Code After: from __future__ import absolute_import import celery from tornado import ioloop from .connection import ConnectionPool from .producer import NonBlockingTaskProducer from .result import AsyncResult VERSION = (0, 3, 4) __version__ = '.'.join(map(str, VERSION)) def setup_nonblocking_producer(celery_app=None, io_loop=None, on_ready=None, result_cls=AsyncResult, limit=1): celery_app = celery_app or celery.current_app io_loop = io_loop or ioloop.IOLoop.instance() NonBlockingTaskProducer.app = celery_app NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop) NonBlockingTaskProducer.result_cls = result_cls if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'): celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer def connect(): broker_url = celery_app.connection().as_uri(include_password=True) options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {}) NonBlockingTaskProducer.conn_pool.connect(broker_url, options=options, callback=on_ready) io_loop.add_callback(connect)
... from .producer import NonBlockingTaskProducer from .result import AsyncResult VERSION = (0, 3, 4) __version__ = '.'.join(map(str, VERSION)) def setup_nonblocking_producer(celery_app=None, io_loop=None, ...
793baa838b7bc7bfed3eb74a69c297645b4c5da6
app/passthrough/views.py
app/passthrough/views.py
import boto3 import botocore from flask import ( abort, current_app, flash, make_response, redirect, request, Response, url_for, ) from flask_login import current_user from . import passthrough_bp @passthrough_bp.route('/<path:path>') def passthrough(path): if not current_user.is_authenticated: return redirect(url_for('auth.login', next=request.path)) else: default_page = current_app.config.get('S3_INDEX_DOCUMENT') if default_page and path.endswith('/'): path += default_page s3 = boto3.resource('s3') bucket = current_app.config.get('S3_BUCKET') obj = s3.Object(bucket, path) try: obj_resp = obj.get() def generate(result): for chunk in iter(lambda: result['Body'].read(8192), b''): yield chunk response = Response(generate(obj_resp)) response.headers['Content-Type'] = obj_resp['ContentType'] response.headers['Content-Encoding'] = obj_resp['ContentEncoding'] return response except botocore.exceptions.ClientError as e: if e.response['Error']['Code'] == "404": abort(404) elif e.response['Error']['Code'] == 'NoSuchKey': abort(404) else: raise
import boto3 import botocore from flask import ( abort, current_app, flash, make_response, redirect, request, Response, url_for, ) from flask_login import current_user from . import passthrough_bp @passthrough_bp.route('/<path:path>') def passthrough(path): if not current_user.is_authenticated: return redirect(url_for('auth.login', next=request.path)) else: default_page = current_app.config.get('S3_INDEX_DOCUMENT') if default_page and (path == '' or path.endswith('/')): path += default_page s3 = boto3.resource('s3') bucket = current_app.config.get('S3_BUCKET') obj = s3.Object(bucket, path) try: obj_resp = obj.get() def generate(result): for chunk in iter(lambda: result['Body'].read(8192), b''): yield chunk response = Response(generate(obj_resp)) response.headers['Content-Type'] = obj_resp['ContentType'] response.headers['Content-Encoding'] = obj_resp['ContentEncoding'] return response except botocore.exceptions.ClientError as e: if e.response['Error']['Code'] == "404": abort(404) elif e.response['Error']['Code'] == 'NoSuchKey': abort(404) else: raise
Handle the case of an empty path
Handle the case of an empty path This will deal with the root domain request going to default page.
Python
mit
iandees/bucket-protection,iandees/bucket-protection
python
## Code Before: import boto3 import botocore from flask import ( abort, current_app, flash, make_response, redirect, request, Response, url_for, ) from flask_login import current_user from . import passthrough_bp @passthrough_bp.route('/<path:path>') def passthrough(path): if not current_user.is_authenticated: return redirect(url_for('auth.login', next=request.path)) else: default_page = current_app.config.get('S3_INDEX_DOCUMENT') if default_page and path.endswith('/'): path += default_page s3 = boto3.resource('s3') bucket = current_app.config.get('S3_BUCKET') obj = s3.Object(bucket, path) try: obj_resp = obj.get() def generate(result): for chunk in iter(lambda: result['Body'].read(8192), b''): yield chunk response = Response(generate(obj_resp)) response.headers['Content-Type'] = obj_resp['ContentType'] response.headers['Content-Encoding'] = obj_resp['ContentEncoding'] return response except botocore.exceptions.ClientError as e: if e.response['Error']['Code'] == "404": abort(404) elif e.response['Error']['Code'] == 'NoSuchKey': abort(404) else: raise ## Instruction: Handle the case of an empty path This will deal with the root domain request going to default page. ## Code After: import boto3 import botocore from flask import ( abort, current_app, flash, make_response, redirect, request, Response, url_for, ) from flask_login import current_user from . import passthrough_bp @passthrough_bp.route('/<path:path>') def passthrough(path): if not current_user.is_authenticated: return redirect(url_for('auth.login', next=request.path)) else: default_page = current_app.config.get('S3_INDEX_DOCUMENT') if default_page and (path == '' or path.endswith('/')): path += default_page s3 = boto3.resource('s3') bucket = current_app.config.get('S3_BUCKET') obj = s3.Object(bucket, path) try: obj_resp = obj.get() def generate(result): for chunk in iter(lambda: result['Body'].read(8192), b''): yield chunk response = Response(generate(obj_resp)) response.headers['Content-Type'] = obj_resp['ContentType'] response.headers['Content-Encoding'] = obj_resp['ContentEncoding'] return response except botocore.exceptions.ClientError as e: if e.response['Error']['Code'] == "404": abort(404) elif e.response['Error']['Code'] == 'NoSuchKey': abort(404) else: raise
# ... existing code ... return redirect(url_for('auth.login', next=request.path)) else: default_page = current_app.config.get('S3_INDEX_DOCUMENT') if default_page and (path == '' or path.endswith('/')): path += default_page s3 = boto3.resource('s3') # ... rest of the code ...
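The guard in the change above is easy to sanity-check outside of Flask and S3. A minimal sketch, assuming "index.html" as the S3_INDEX_DOCUMENT value (the real value comes from the app config):

```python
# Standalone restatement of the path guard added above; "index.html" is an assumed default page.
default_page = "index.html"

def resolve(path):
    if default_page and (path == "" or path.endswith("/")):
        path += default_page
    return path

assert resolve("") == "index.html"                    # root-domain request now maps to the default page
assert resolve("docs/") == "docs/index.html"          # trailing slash behaved this way before the change too
assert resolve("docs/guide.pdf") == "docs/guide.pdf"  # explicit object keys are left untouched
```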
dc47c88d5f1c6f1e78322c5bfcb585e54b3a0c0a
python/colorTest.py
python/colorTest.py
from rgbmatrix import RGBMatrix from random import randint import time rows = 16 chains = 1 parallel = 1 ledMatrix = RGBMatrix(rows, chains, parallel) height = ledMatrix.height width = ledMatrix.width for x in range(width): for y in range(height): r = int(y % 8 / 7.0 * 255) g = int(x % 8 / 7.0 * 255) b = int((int(x / 8) + int(y / 8) * 4) / 7.0 * 255) print r ledMatrix.SetPixel(x, y, r, g, b) time.sleep(0.05) time.sleep(5) ledMatrix.Clear()
from rgbmatrix import RGBMatrix from random import randint import time rows = 16 chains = 1 parallel = 1 ledMatrix = RGBMatrix(rows, chains, parallel) height = ledMatrix.height width = ledMatrix.width for x in range(width): for y in range(height): r = int(y % 8 / 7.0 * 255) g = int(x % 8 / 7.0 * 255) b = int((int(x / 8) + int(y / 8) * 4) / 7.0 * 255) ledMatrix.SetPixel(x, y, r, g, b) time.sleep(0.02) time.sleep(5) ledMatrix.Clear()
Remove print and increase speed
Remove print and increase speed
Python
mit
DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix
python
## Code Before: from rgbmatrix import RGBMatrix from random import randint import time rows = 16 chains = 1 parallel = 1 ledMatrix = RGBMatrix(rows, chains, parallel) height = ledMatrix.height width = ledMatrix.width for x in range(width): for y in range(height): r = int(y % 8 / 7.0 * 255) g = int(x % 8 / 7.0 * 255) b = int((int(x / 8) + int(y / 8) * 4) / 7.0 * 255) print r ledMatrix.SetPixel(x, y, r, g, b) time.sleep(0.05) time.sleep(5) ledMatrix.Clear() ## Instruction: Remove print and increase speed ## Code After: from rgbmatrix import RGBMatrix from random import randint import time rows = 16 chains = 1 parallel = 1 ledMatrix = RGBMatrix(rows, chains, parallel) height = ledMatrix.height width = ledMatrix.width for x in range(width): for y in range(height): r = int(y % 8 / 7.0 * 255) g = int(x % 8 / 7.0 * 255) b = int((int(x / 8) + int(y / 8) * 4) / 7.0 * 255) ledMatrix.SetPixel(x, y, r, g, b) time.sleep(0.02) time.sleep(5) ledMatrix.Clear()
... r = int(y % 8 / 7.0 * 255) g = int(x % 8 / 7.0 * 255) b = int((int(x / 8) + int(y / 8) * 4) / 7.0 * 255) ledMatrix.SetPixel(x, y, r, g, b) time.sleep(0.02) time.sleep(5) ledMatrix.Clear() ...
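As an aside, the per-pixel colour arithmetic in the script above can be spot-checked in isolation; the sample coordinates below are arbitrary, and the 8×8 tile layout is inferred from the modulo/divide constants rather than stated in the commit:

```python
# Restates the script's colour formula so individual pixels can be checked by hand.
def tile_colour(x, y):
    r = int(y % 8 / 7.0 * 255)                          # red ramps with y inside each 8x8 tile
    g = int(x % 8 / 7.0 * 255)                          # green ramps with x inside each tile
    b = int((int(x / 8) + int(y / 8) * 4) / 7.0 * 255)  # blue encodes which tile the pixel is in
    return r, g, b

print(tile_colour(0, 0))   # (0, 0, 0)
print(tile_colour(7, 15))  # (255, 255, 145)
```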
e2c9d39dd30a60c5c54521d7d11773430cae1bd1
tests/test_image_access.py
tests/test_image_access.py
import pytest import imghdr from io import BytesIO from PIL import Image from pikepdf import _qpdf as qpdf def test_jpeg(resources, outdir): pdf = qpdf.Pdf.open(resources / 'congress.pdf') # If you are looking at this as example code, Im0 is not necessarily the # name of any image. pdfimage = pdf.pages[0].Resources.XObject.Im0 raw_stream = pdf.pages[0].Resources.XObject.Im0.read_raw_stream() with pytest.raises(RuntimeError): pdf.pages[0].Resources.XObject.Im0.read_stream() assert imghdr.what('', h=raw_stream) == 'jpeg' im = Image.open(BytesIO(raw_stream)) assert im.size == (pdfimage.Width, pdfimage.Height) assert im.mode == 'RGB'
import pytest import imghdr from io import BytesIO from PIL import Image import zlib from pikepdf import Pdf, Object def test_jpeg(resources, outdir): pdf = Pdf.open(resources / 'congress.pdf') # If you are looking at this as example code, Im0 is not necessarily the # name of any image. pdfimage = pdf.pages[0].Resources.XObject['/Im0'] raw_bytes = pdfimage.read_raw_bytes() with pytest.raises(RuntimeError): pdfimage.read_bytes() assert imghdr.what('', h=raw_bytes) == 'jpeg' im = Image.open(BytesIO(raw_bytes)) assert im.size == (pdfimage.Width, pdfimage.Height) assert im.mode == 'RGB' def test_replace_jpeg(resources, outdir): pdf = Pdf.open(resources / 'congress.pdf') pdfimage = pdf.pages[0].Resources.XObject['/Im0'] raw_bytes = pdfimage.read_raw_bytes() im = Image.open(BytesIO(raw_bytes)) grayscale = im.convert('L') #newimage = Object.Stream(pdf, grayscale.tobytes()) pdfimage.write(zlib.compress(grayscale.tobytes()), Object.Name("/FlateDecode"), Object.Null()) pdfimage.ColorSpace = Object.Name('/DeviceGray') pdf.save(outdir / 'congress_gray.pdf')
Add manual experiment that replaces a RGB image with grayscale
Add manual experiment that replaces a RGB image with grayscale
Python
mpl-2.0
pikepdf/pikepdf,pikepdf/pikepdf,pikepdf/pikepdf
python
## Code Before: import pytest import imghdr from io import BytesIO from PIL import Image from pikepdf import _qpdf as qpdf def test_jpeg(resources, outdir): pdf = qpdf.Pdf.open(resources / 'congress.pdf') # If you are looking at this as example code, Im0 is not necessarily the # name of any image. pdfimage = pdf.pages[0].Resources.XObject.Im0 raw_stream = pdf.pages[0].Resources.XObject.Im0.read_raw_stream() with pytest.raises(RuntimeError): pdf.pages[0].Resources.XObject.Im0.read_stream() assert imghdr.what('', h=raw_stream) == 'jpeg' im = Image.open(BytesIO(raw_stream)) assert im.size == (pdfimage.Width, pdfimage.Height) assert im.mode == 'RGB' ## Instruction: Add manual experiment that replaces a RGB image with grayscale ## Code After: import pytest import imghdr from io import BytesIO from PIL import Image import zlib from pikepdf import Pdf, Object def test_jpeg(resources, outdir): pdf = Pdf.open(resources / 'congress.pdf') # If you are looking at this as example code, Im0 is not necessarily the # name of any image. pdfimage = pdf.pages[0].Resources.XObject['/Im0'] raw_bytes = pdfimage.read_raw_bytes() with pytest.raises(RuntimeError): pdfimage.read_bytes() assert imghdr.what('', h=raw_bytes) == 'jpeg' im = Image.open(BytesIO(raw_bytes)) assert im.size == (pdfimage.Width, pdfimage.Height) assert im.mode == 'RGB' def test_replace_jpeg(resources, outdir): pdf = Pdf.open(resources / 'congress.pdf') pdfimage = pdf.pages[0].Resources.XObject['/Im0'] raw_bytes = pdfimage.read_raw_bytes() im = Image.open(BytesIO(raw_bytes)) grayscale = im.convert('L') #newimage = Object.Stream(pdf, grayscale.tobytes()) pdfimage.write(zlib.compress(grayscale.tobytes()), Object.Name("/FlateDecode"), Object.Null()) pdfimage.ColorSpace = Object.Name('/DeviceGray') pdf.save(outdir / 'congress_gray.pdf')
# ... existing code ... import imghdr from io import BytesIO from PIL import Image import zlib from pikepdf import Pdf, Object def test_jpeg(resources, outdir): pdf = Pdf.open(resources / 'congress.pdf') # If you are looking at this as example code, Im0 is not necessarily the # name of any image. pdfimage = pdf.pages[0].Resources.XObject['/Im0'] raw_bytes = pdfimage.read_raw_bytes() with pytest.raises(RuntimeError): pdfimage.read_bytes() assert imghdr.what('', h=raw_bytes) == 'jpeg' im = Image.open(BytesIO(raw_bytes)) assert im.size == (pdfimage.Width, pdfimage.Height) assert im.mode == 'RGB' def test_replace_jpeg(resources, outdir): pdf = Pdf.open(resources / 'congress.pdf') pdfimage = pdf.pages[0].Resources.XObject['/Im0'] raw_bytes = pdfimage.read_raw_bytes() im = Image.open(BytesIO(raw_bytes)) grayscale = im.convert('L') #newimage = Object.Stream(pdf, grayscale.tobytes()) pdfimage.write(zlib.compress(grayscale.tobytes()), Object.Name("/FlateDecode"), Object.Null()) pdfimage.ColorSpace = Object.Name('/DeviceGray') pdf.save(outdir / 'congress_gray.pdf') # ... rest of the code ...
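Setting the pikepdf-specific calls aside, the grayscale-plus-Flate idea in the experiment above can be illustrated with just Pillow and zlib. A sketch using a generated 64×64 image as a stand-in for the decoded JPEG (nothing here reads the actual congress.pdf resource):

```python
# Shows only the conversion/compression step of the experiment; pikepdf is deliberately omitted.
import zlib
from PIL import Image

im = Image.new("RGB", (64, 64), (200, 10, 10))  # stand-in for the decoded page image
gray = im.convert("L")                          # one byte per pixel instead of three
raw = gray.tobytes()
print(len(raw), len(zlib.compress(raw)))        # raw grayscale size vs. the /FlateDecode-style payload
```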
4b111c035f92f941cff4b6885d2fa01ddcb1657e
titanembeds/database/custom_redislite.py
titanembeds/database/custom_redislite.py
import urlparse from limits.storage import Storage from redislite import Redis import time class LimitsRedisLite(Storage): # For Python Limits STORAGE_SCHEME = "redislite" def __init__(self, uri, **options): self.redis_instance = Redis(urlparse.urlparse(uri).netloc) def check(self): return True def get_expiry(self, key): return self.redis_instance.ttl(key) def incr(self, key, expiry, elastic_expiry=False): if not self.redis_instance.exists(key): self.redis_instance.set(key, 1) self.redis_instance.expireat(key, int(time.time() + expiry)) else: oldexp = self.get_expiry(key) self.redis_instance.set(key, int(self.redis_instance.get(key))+1) self.redis_instance.expireat(key, int(time.time() + oldexp)) return def get(self, key): return int(self.redis_instance.get(key)) def reset(self): return self.redis_instance.flushdb()
import urlparse from limits.storage import Storage from redislite import Redis import time class LimitsRedisLite(Storage): # For Python Limits STORAGE_SCHEME = "redislite" def __init__(self, uri, **options): self.redis_instance = Redis(urlparse.urlparse(uri).netloc) def check(self): return True def get_expiry(self, key): return self.redis_instance.ttl(key) or 0 def incr(self, key, expiry, elastic_expiry=False): if not self.redis_instance.exists(key): self.redis_instance.set(key, 1, ex=expiry) else: oldexp = self.get_expiry(key) self.redis_instance.set(key, int(self.redis_instance.get(key))+1, ex=oldexp) return def get(self, key): return int(self.redis_instance.get(key)) def reset(self): return self.redis_instance.flushdb()
Fix custom redislite to account for the none errors
Fix custom redislite to account for the none errors
Python
agpl-3.0
TitanEmbeds/Titan,TitanEmbeds/Titan,TitanEmbeds/Titan
python
## Code Before: import urlparse from limits.storage import Storage from redislite import Redis import time class LimitsRedisLite(Storage): # For Python Limits STORAGE_SCHEME = "redislite" def __init__(self, uri, **options): self.redis_instance = Redis(urlparse.urlparse(uri).netloc) def check(self): return True def get_expiry(self, key): return self.redis_instance.ttl(key) def incr(self, key, expiry, elastic_expiry=False): if not self.redis_instance.exists(key): self.redis_instance.set(key, 1) self.redis_instance.expireat(key, int(time.time() + expiry)) else: oldexp = self.get_expiry(key) self.redis_instance.set(key, int(self.redis_instance.get(key))+1) self.redis_instance.expireat(key, int(time.time() + oldexp)) return def get(self, key): return int(self.redis_instance.get(key)) def reset(self): return self.redis_instance.flushdb() ## Instruction: Fix custom redislite to account for the none errors ## Code After: import urlparse from limits.storage import Storage from redislite import Redis import time class LimitsRedisLite(Storage): # For Python Limits STORAGE_SCHEME = "redislite" def __init__(self, uri, **options): self.redis_instance = Redis(urlparse.urlparse(uri).netloc) def check(self): return True def get_expiry(self, key): return self.redis_instance.ttl(key) or 0 def incr(self, key, expiry, elastic_expiry=False): if not self.redis_instance.exists(key): self.redis_instance.set(key, 1, ex=expiry) else: oldexp = self.get_expiry(key) self.redis_instance.set(key, int(self.redis_instance.get(key))+1, ex=oldexp) return def get(self, key): return int(self.redis_instance.get(key)) def reset(self): return self.redis_instance.flushdb()
# ... existing code ... return True def get_expiry(self, key): return self.redis_instance.ttl(key) or 0 def incr(self, key, expiry, elastic_expiry=False): if not self.redis_instance.exists(key): self.redis_instance.set(key, 1, ex=expiry) else: oldexp = self.get_expiry(key) self.redis_instance.set(key, int(self.redis_instance.get(key))+1, ex=oldexp) return def get(self, key): # ... rest of the code ...
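The heart of the fix above is redis-py's `ex=` keyword, which writes the value and its TTL in a single call instead of a separate set/expireat pair. An illustrative snippet; the database file path and key name are made up for the example:

```python
# Demonstrates set(..., ex=...) as used in the fixed storage class; names here are placeholders.
from redislite import Redis

r = Redis("/tmp/limits-demo.rdb")          # redislite runs an embedded server backed by this file
r.set("LIMITER/api/1/minute", 1, ex=60)    # value and 60-second expiry set together
print(r.get("LIMITER/api/1/minute"), r.ttl("LIMITER/api/1/minute"))  # b'1' and roughly 60
```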
2db14ce8b5b5b47089678da1e02ff169c7a86f55
ifs-web-service/ifs-project-setup-mgt-service/src/main/java/org/innovateuk/ifs/project/grantofferletter/viewmodel/OtherCostsRowModel.java
ifs-web-service/ifs-project-setup-mgt-service/src/main/java/org/innovateuk/ifs/project/grantofferletter/viewmodel/OtherCostsRowModel.java
package org.innovateuk.ifs.project.grantofferletter.viewmodel; import java.math.BigDecimal; import java.util.List; import java.util.Map; import static java.util.Collections.singletonList; /* * Holder of values for the other costs rows on GOL finance tables, which are handled differently */ public class OtherCostsRowModel { private final String otherCostName; private Map<String, List<BigDecimal>> otherCostValues; public OtherCostsRowModel(String otherCostName, Map<String, List<BigDecimal>> otherCostValues) { this.otherCostName = otherCostName; this.otherCostValues = otherCostValues; } public String getOtherCostName() { return otherCostName; } public Map<String, List<BigDecimal>> getOtherCostValues() { return otherCostValues; } public void addToCostValues(String orgName, BigDecimal cost) { if(otherCostValues.keySet().contains(orgName)) { otherCostValues.get(orgName).add(cost); } else { otherCostValues.put(orgName, singletonList(cost)); } } }
package org.innovateuk.ifs.project.grantofferletter.viewmodel; import java.math.BigDecimal; import java.util.Collection; import java.util.List; import java.util.Map; import static java.util.Collections.singletonList; /* * Holder of values for the other costs rows on GOL finance tables, which are handled differently */ public class OtherCostsRowModel { private final String otherCostName; private Map<String, List<BigDecimal>> otherCostValues; public OtherCostsRowModel(String otherCostName, Map<String, List<BigDecimal>> otherCostValues) { this.otherCostName = otherCostName; this.otherCostValues = otherCostValues; } public String getOtherCostName() { return otherCostName; } public Map<String, List<BigDecimal>> getOtherCostValues() { return otherCostValues; } public void addToCostValues(String orgName, BigDecimal cost) { if(otherCostValues.keySet().contains(orgName)) { otherCostValues.get(orgName).add(cost); } else { otherCostValues.put(orgName, singletonList(cost)); } } public Collection<String> getOrganisations() { return otherCostValues.keySet(); } public BigDecimal getCostValuesForOrg(String organisation) { return otherCostValues.get(organisation) .stream() .reduce(BigDecimal.ZERO, BigDecimal::add); } }
Add helper methods to other costs rows
IFS-5895: Add helper methods to other costs rows
Java
mit
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
java
## Code Before: package org.innovateuk.ifs.project.grantofferletter.viewmodel; import java.math.BigDecimal; import java.util.List; import java.util.Map; import static java.util.Collections.singletonList; /* * Holder of values for the other costs rows on GOL finance tables, which are handled differently */ public class OtherCostsRowModel { private final String otherCostName; private Map<String, List<BigDecimal>> otherCostValues; public OtherCostsRowModel(String otherCostName, Map<String, List<BigDecimal>> otherCostValues) { this.otherCostName = otherCostName; this.otherCostValues = otherCostValues; } public String getOtherCostName() { return otherCostName; } public Map<String, List<BigDecimal>> getOtherCostValues() { return otherCostValues; } public void addToCostValues(String orgName, BigDecimal cost) { if(otherCostValues.keySet().contains(orgName)) { otherCostValues.get(orgName).add(cost); } else { otherCostValues.put(orgName, singletonList(cost)); } } } ## Instruction: IFS-5895: Add helper methods to other costs rows ## Code After: package org.innovateuk.ifs.project.grantofferletter.viewmodel; import java.math.BigDecimal; import java.util.Collection; import java.util.List; import java.util.Map; import static java.util.Collections.singletonList; /* * Holder of values for the other costs rows on GOL finance tables, which are handled differently */ public class OtherCostsRowModel { private final String otherCostName; private Map<String, List<BigDecimal>> otherCostValues; public OtherCostsRowModel(String otherCostName, Map<String, List<BigDecimal>> otherCostValues) { this.otherCostName = otherCostName; this.otherCostValues = otherCostValues; } public String getOtherCostName() { return otherCostName; } public Map<String, List<BigDecimal>> getOtherCostValues() { return otherCostValues; } public void addToCostValues(String orgName, BigDecimal cost) { if(otherCostValues.keySet().contains(orgName)) { otherCostValues.get(orgName).add(cost); } else { otherCostValues.put(orgName, singletonList(cost)); } } public Collection<String> getOrganisations() { return otherCostValues.keySet(); } public BigDecimal getCostValuesForOrg(String organisation) { return otherCostValues.get(organisation) .stream() .reduce(BigDecimal.ZERO, BigDecimal::add); } }
... package org.innovateuk.ifs.project.grantofferletter.viewmodel; import java.math.BigDecimal; import java.util.Collection; import java.util.List; import java.util.Map; ... otherCostValues.put(orgName, singletonList(cost)); } } public Collection<String> getOrganisations() { return otherCostValues.keySet(); } public BigDecimal getCostValuesForOrg(String organisation) { return otherCostValues.get(organisation) .stream() .reduce(BigDecimal.ZERO, BigDecimal::add); } } ...
6413ce937fbdfdf1acc5cffab4f01f0b40fb2cfc
views.py
views.py
from flask import Flask, render_template, url_for, Markup from flask.ext.libsass import * import pkg_resources import markdown app=Flask(__name__) Sass( {'app': 'scss/app.scss'}, app, url_path='/static/css', include_paths=[pkg_resources.resource_filename('views', 'scss')], output_style='compressed' ) @app.route('/<page>') def get_page(page): md=open(pkg_resources.resource_filename('views', 'pages/' + page + '.md'), encoding='UTF-8') html=Markup(markdown.markdown(md.read(), output_format='html5')) md.close() if page=='index': return render_template('page.html', content=html) return render_template('page.html', content=html, title=page) @app.route('/') def index(): return get_page('index') if __name__=='__main__': app.run()
from flask import Flask, render_template, url_for, Markup, abort from flask.ext.libsass import * import pkg_resources import markdown app=Flask(__name__) Sass( {'app': 'scss/app.scss'}, app, url_path='/static/css', include_paths=[pkg_resources.resource_filename('views', 'scss')], output_style='compressed' ) @app.route('/<page>') def get_page(page): try: md=open(pkg_resources.resource_filename('views', 'pages/' + page + '.md'), encoding='UTF-8') html=Markup(markdown.markdown(md.read(), output_format='html5')) md.close() if page=='index': return render_template('page.html', content=html) return render_template('page.html', content=html, title=page) except OSError: abort(404) @app.route('/') def index(): return get_page('index') if __name__=='__main__': app.run()
Add basic page request exception handling
Add basic page request exception handling
Python
mpl-2.0
vishwin/vishwin.info-http,vishwin/vishwin.info-http,vishwin/vishwin.info-http
python
## Code Before: from flask import Flask, render_template, url_for, Markup from flask.ext.libsass import * import pkg_resources import markdown app=Flask(__name__) Sass( {'app': 'scss/app.scss'}, app, url_path='/static/css', include_paths=[pkg_resources.resource_filename('views', 'scss')], output_style='compressed' ) @app.route('/<page>') def get_page(page): md=open(pkg_resources.resource_filename('views', 'pages/' + page + '.md'), encoding='UTF-8') html=Markup(markdown.markdown(md.read(), output_format='html5')) md.close() if page=='index': return render_template('page.html', content=html) return render_template('page.html', content=html, title=page) @app.route('/') def index(): return get_page('index') if __name__=='__main__': app.run() ## Instruction: Add basic page request exception handling ## Code After: from flask import Flask, render_template, url_for, Markup, abort from flask.ext.libsass import * import pkg_resources import markdown app=Flask(__name__) Sass( {'app': 'scss/app.scss'}, app, url_path='/static/css', include_paths=[pkg_resources.resource_filename('views', 'scss')], output_style='compressed' ) @app.route('/<page>') def get_page(page): try: md=open(pkg_resources.resource_filename('views', 'pages/' + page + '.md'), encoding='UTF-8') html=Markup(markdown.markdown(md.read(), output_format='html5')) md.close() if page=='index': return render_template('page.html', content=html) return render_template('page.html', content=html, title=page) except OSError: abort(404) @app.route('/') def index(): return get_page('index') if __name__=='__main__': app.run()
... from flask import Flask, render_template, url_for, Markup, abort from flask.ext.libsass import * import pkg_resources import markdown ... @app.route('/<page>') def get_page(page): try: md=open(pkg_resources.resource_filename('views', 'pages/' + page + '.md'), encoding='UTF-8') html=Markup(markdown.markdown(md.read(), output_format='html5')) md.close() if page=='index': return render_template('page.html', content=html) return render_template('page.html', content=html, title=page) except OSError: abort(404) @app.route('/') def index(): ...
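The try/except above turns a missing page file into abort(404) instead of an unhandled server error. As a hedged sketch of how such a 404 can then be presented to visitors (this is not code from the repository; the handler and the 404.html template are assumptions), Flask's errorhandler hook pairs naturally with it:

```python
from flask import Flask, abort, render_template

app = Flask(__name__)

@app.errorhandler(404)
def page_not_found(error):
    # Hypothetical handler: render an assumed 404.html template with the right status code.
    return render_template('404.html'), 404

@app.route('/<page>')
def get_page(page):
    try:
        with open('pages/' + page + '.md', encoding='UTF-8') as md:
            return md.read()
    except OSError:
        abort(404)  # missing markdown file becomes a clean 404 response
```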
356dd5294280db3334f86354202f0d68881254b9
joerd/check.py
joerd/check.py
import zipfile import tarfile import shutil import tempfile from osgeo import gdal def is_zip(tmp): """ Returns True if the NamedTemporaryFile given as the argument appears to be a well-formed Zip file. """ try: zip_file = zipfile.ZipFile(tmp.name, 'r') test_result = zip_file.testzip() return test_result is None except: pass return False def tar_gz_has_gdal(member_name): """ Returns a function which, when called with a NamedTemporaryFile, returns True if that file is a GZip-encoded TAR file containing a `member_name` member which can be opened with GDAL. """ def func(tmp): try: tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2) with tempfile.NamedTemporaryFile() as tmp_member: shutil.copyfileobj(tar.extractfile(member_name), tmp_member) return is_gdal(tmp_member) except (tarfile.TarError, IOError, OSError) as e: return False def is_gdal(tmp): """ Returns true if the NamedTemporaryFile given as the argument appears to be a well-formed GDAL raster file. """ try: ds = gdal.Open(tmp.name) band = ds.GetRasterBand(1) band.ComputeBandStats() return True except: pass return False
import zipfile import tarfile import shutil import tempfile from osgeo import gdal def is_zip(tmp): """ Returns True if the NamedTemporaryFile given as the argument appears to be a well-formed Zip file. """ try: zip_file = zipfile.ZipFile(tmp.name, 'r') test_result = zip_file.testzip() return test_result is None except: pass return False def tar_gz_has_gdal(member_name): """ Returns a function which, when called with a NamedTemporaryFile, returns True if that file is a GZip-encoded TAR file containing a `member_name` member which can be opened with GDAL. """ def func(tmp): try: tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2) with tempfile.NamedTemporaryFile() as tmp_member: shutil.copyfileobj(tar.extractfile(member_name), tmp_member) tmp_member.seek(0) return is_gdal(tmp_member) except (tarfile.TarError, IOError, OSError) as e: return False return func def is_gdal(tmp): """ Returns true if the NamedTemporaryFile given as the argument appears to be a well-formed GDAL raster file. """ try: ds = gdal.Open(tmp.name) band = ds.GetRasterBand(1) band.ComputeBandStats() return True except: pass return False
Return verifier function, not None. Also reset the temporary file to the beginning before verifying it.
Return verifier function, not None. Also reset the temporary file to the beginning before verifying it.
Python
mit
mapzen/joerd,tilezen/joerd
python
## Code Before: import zipfile import tarfile import shutil import tempfile from osgeo import gdal def is_zip(tmp): """ Returns True if the NamedTemporaryFile given as the argument appears to be a well-formed Zip file. """ try: zip_file = zipfile.ZipFile(tmp.name, 'r') test_result = zip_file.testzip() return test_result is None except: pass return False def tar_gz_has_gdal(member_name): """ Returns a function which, when called with a NamedTemporaryFile, returns True if that file is a GZip-encoded TAR file containing a `member_name` member which can be opened with GDAL. """ def func(tmp): try: tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2) with tempfile.NamedTemporaryFile() as tmp_member: shutil.copyfileobj(tar.extractfile(member_name), tmp_member) return is_gdal(tmp_member) except (tarfile.TarError, IOError, OSError) as e: return False def is_gdal(tmp): """ Returns true if the NamedTemporaryFile given as the argument appears to be a well-formed GDAL raster file. """ try: ds = gdal.Open(tmp.name) band = ds.GetRasterBand(1) band.ComputeBandStats() return True except: pass return False ## Instruction: Return verifier function, not None. Also reset the temporary file to the beginning before verifying it. ## Code After: import zipfile import tarfile import shutil import tempfile from osgeo import gdal def is_zip(tmp): """ Returns True if the NamedTemporaryFile given as the argument appears to be a well-formed Zip file. """ try: zip_file = zipfile.ZipFile(tmp.name, 'r') test_result = zip_file.testzip() return test_result is None except: pass return False def tar_gz_has_gdal(member_name): """ Returns a function which, when called with a NamedTemporaryFile, returns True if that file is a GZip-encoded TAR file containing a `member_name` member which can be opened with GDAL. """ def func(tmp): try: tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2) with tempfile.NamedTemporaryFile() as tmp_member: shutil.copyfileobj(tar.extractfile(member_name), tmp_member) tmp_member.seek(0) return is_gdal(tmp_member) except (tarfile.TarError, IOError, OSError) as e: return False return func def is_gdal(tmp): """ Returns true if the NamedTemporaryFile given as the argument appears to be a well-formed GDAL raster file. """ try: ds = gdal.Open(tmp.name) band = ds.GetRasterBand(1) band.ComputeBandStats() return True except: pass return False
// ... existing code ... tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2) with tempfile.NamedTemporaryFile() as tmp_member: shutil.copyfileobj(tar.extractfile(member_name), tmp_member) tmp_member.seek(0) return is_gdal(tmp_member) except (tarfile.TarError, IOError, OSError) as e: return False return func def is_gdal(tmp): // ... rest of the code ...
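The substantive part of the fix is tmp_member.seek(0): shutil.copyfileobj leaves the destination file positioned at its end, so a verifier that reads the file afterwards would otherwise see nothing. A standard-library-only sketch of that behaviour (the payload bytes are invented):

```python
import io
import shutil
import tempfile

src = io.BytesIO(b"fake raster payload")

with tempfile.NamedTemporaryFile() as tmp:
    shutil.copyfileobj(src, tmp)
    print(tmp.read())   # b'' because the position is at end of file after the copy
    tmp.seek(0)         # rewind, as the fix above does
    print(tmp.read())   # b'fake raster payload'
```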
7450894dca96dee2749db6cef118dd898cedd98f
src/main/java/seedu/address/commons/events/model/AddressBookChangedEvent.java
src/main/java/seedu/address/commons/events/model/AddressBookChangedEvent.java
package seedu.address.commons.events.model; import seedu.address.commons.events.BaseEvent; import seedu.address.model.ReadOnlyAddressBook; /** Indicates the AddressBook in the model has changed*/ public class AddressBookChangedEvent extends BaseEvent { public final ReadOnlyAddressBook data; public AddressBookChangedEvent(ReadOnlyAddressBook data){ this.data = data; } @Override public String toString() { return "number of persons " + data.getPersonList().size() + ", number of tags " + data.getTagList().size(); } }
package seedu.address.commons.events.model; import seedu.address.commons.events.BaseEvent; import seedu.address.model.ReadOnlyAddressBook; /** Indicates the AddressBook in the model has changed*/ public class AddressBookChangedEvent extends BaseEvent { public final ReadOnlyAddressBook data; public AddressBookChangedEvent(ReadOnlyAddressBook data){ this.data = data; } @Override public String toString() { return "number of persons " + data.getListActivity().size() + ", number of tags " + data.getTagList().size(); } }
Update address book change event to activity
Update address book change event to activity
Java
mit
CS2103AUG2016-W14-C1/main
java
## Code Before: package seedu.address.commons.events.model; import seedu.address.commons.events.BaseEvent; import seedu.address.model.ReadOnlyAddressBook; /** Indicates the AddressBook in the model has changed*/ public class AddressBookChangedEvent extends BaseEvent { public final ReadOnlyAddressBook data; public AddressBookChangedEvent(ReadOnlyAddressBook data){ this.data = data; } @Override public String toString() { return "number of persons " + data.getPersonList().size() + ", number of tags " + data.getTagList().size(); } } ## Instruction: Update address book change event to activity ## Code After: package seedu.address.commons.events.model; import seedu.address.commons.events.BaseEvent; import seedu.address.model.ReadOnlyAddressBook; /** Indicates the AddressBook in the model has changed*/ public class AddressBookChangedEvent extends BaseEvent { public final ReadOnlyAddressBook data; public AddressBookChangedEvent(ReadOnlyAddressBook data){ this.data = data; } @Override public String toString() { return "number of persons " + data.getListActivity().size() + ", number of tags " + data.getTagList().size(); } }
# ... existing code ... @Override public String toString() { return "number of persons " + data.getListActivity().size() + ", number of tags " + data.getTagList().size(); } } # ... rest of the code ...
665d980f62840fc0c8e531cab21faf8151d016d7
setup.py
setup.py
from setuptools import setup, find_packages setup(name='pprof', version='0.9.6', packages=find_packages(), install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2", "regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6", "virtualenv==13.1.0", "sphinxcontrib-napoleon"], author="Andreas Simbuerger", author_email="[email protected]", description="This is the experiment driver for the pprof study", license="MIT", entry_points={ 'console_scripts': ['pprof=pprof.driver:main'] })
from setuptools import setup, find_packages setup(name='pprof', version='0.9.6', packages=find_packages(), install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2", "regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6", "virtualenv==13.1.0", "sphinxcontrib-napoleon", "psycopg2"], author="Andreas Simbuerger", author_email="[email protected]", description="This is the experiment driver for the pprof study", license="MIT", entry_points={ 'console_scripts': ['pprof=pprof.driver:main'] })
Add pyscopg2 to list of dependencies
Add pyscopg2 to list of dependencies Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0 Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]] Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b] Former-commit-id: a778516564b1489efffbb822760e23ead82c4467
Python
mit
PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild
python
## Code Before: from setuptools import setup, find_packages setup(name='pprof', version='0.9.6', packages=find_packages(), install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2", "regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6", "virtualenv==13.1.0", "sphinxcontrib-napoleon"], author="Andreas Simbuerger", author_email="[email protected]", description="This is the experiment driver for the pprof study", license="MIT", entry_points={ 'console_scripts': ['pprof=pprof.driver:main'] }) ## Instruction: Add pyscopg2 to list of dependencies Former-commit-id: afed58eea17319b11e3fafc1ef45c7cdf590fac0 Former-commit-id: 257ed272462ca52cc15bae9040296ace91e15843 [formerly 19b9a870795fb176a9fb49b427a00b70fc6e2b35] [formerly 5b2ece2b396282c63c2902d6128e3a1f2c982708 [formerly 1a662bf08f6e4b81939fe16c4422c7201c9394f5]] Former-commit-id: 2336bd3da92c144e041fba0c6d8a06f8c5beb9d3 [formerly e493e91f63a6989ea1b5c3add83dec0a8a4de16b] Former-commit-id: a778516564b1489efffbb822760e23ead82c4467 ## Code After: from setuptools import setup, find_packages setup(name='pprof', version='0.9.6', packages=find_packages(), install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2", "regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6", "virtualenv==13.1.0", "sphinxcontrib-napoleon", "psycopg2"], author="Andreas Simbuerger", author_email="[email protected]", description="This is the experiment driver for the pprof study", license="MIT", entry_points={ 'console_scripts': ['pprof=pprof.driver:main'] })
# ... existing code ... packages=find_packages(), install_requires=["SQLAlchemy==1.0.4", "cloud==2.8.5", "plumbum==1.4.2", "regex==2015.5.28", "wheel==0.24.0", "parse==1.6.6", "virtualenv==13.1.0", "sphinxcontrib-napoleon", "psycopg2"], author="Andreas Simbuerger", author_email="[email protected]", description="This is the experiment driver for the pprof study", # ... rest of the code ...
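For context on why the new dependency sits alongside SQLAlchemy: psycopg2 is the driver SQLAlchemy loads for its default PostgreSQL dialect, so it must be installed before an engine like the one sketched below can be created. This snippet is only an illustration; the connection URL and database name are invented, not taken from pprof.

```python
from sqlalchemy import create_engine

# The "postgresql+psycopg2" dialect makes SQLAlchemy import psycopg2 under the
# hood; if the package is missing, create_engine() fails with an ImportError.
engine = create_engine("postgresql+psycopg2://user:secret@localhost/pprof_db")
```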
fad05f1e3814c65feccb1f40ee4d4408a7c40709
src/cafe/image/tiff/AbstractRationalField.java
src/cafe/image/tiff/AbstractRationalField.java
/** * Copyright (c) 2014 by Wen Yu. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Any modifications to this file must keep this entire header intact. */ package cafe.image.tiff; import java.io.IOException; import cafe.io.RandomAccessOutputStream; public abstract class AbstractRationalField extends TiffField<int[]> { public AbstractRationalField(short tag, FieldType fieldType, int[] data) { super(tag, fieldType, data.length>>1); this.data = data; } protected int writeData(RandomAccessOutputStream os, int toOffset) throws IOException { // dataOffset = toOffset; os.writeInt(toOffset); os.seek(toOffset); for (int value : data) os.writeInt(value); toOffset += (data.length << 2); return toOffset; } }
/** * Copyright (c) 2014 by Wen Yu. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Any modifications to this file must keep this entire header intact. */ package cafe.image.tiff; import java.io.IOException; import cafe.io.RandomAccessOutputStream; public abstract class AbstractRationalField extends TiffField<int[]> { public AbstractRationalField(short tag, FieldType fieldType, int[] data) { super(tag, fieldType, data.length>>1); this.data = data; } public int[] getDataAsLong() { return data; } protected int writeData(RandomAccessOutputStream os, int toOffset) throws IOException { // dataOffset = toOffset; os.writeInt(toOffset); os.seek(toOffset); for (int value : data) os.writeInt(value); toOffset += (data.length << 2); return toOffset; } }
Allow to call getDataAsLong for Rational type field
Allow to call getDataAsLong for Rational type field
Java
epl-1.0
dragon66/icafe,dragon66/icafe,dragon66/icafe,nagyistoce/icafe
java
## Code Before: /** * Copyright (c) 2014 by Wen Yu. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Any modifications to this file must keep this entire header intact. */ package cafe.image.tiff; import java.io.IOException; import cafe.io.RandomAccessOutputStream; public abstract class AbstractRationalField extends TiffField<int[]> { public AbstractRationalField(short tag, FieldType fieldType, int[] data) { super(tag, fieldType, data.length>>1); this.data = data; } protected int writeData(RandomAccessOutputStream os, int toOffset) throws IOException { // dataOffset = toOffset; os.writeInt(toOffset); os.seek(toOffset); for (int value : data) os.writeInt(value); toOffset += (data.length << 2); return toOffset; } } ## Instruction: Allow to call getDataAsLong for Rational type field ## Code After: /** * Copyright (c) 2014 by Wen Yu. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Any modifications to this file must keep this entire header intact. */ package cafe.image.tiff; import java.io.IOException; import cafe.io.RandomAccessOutputStream; public abstract class AbstractRationalField extends TiffField<int[]> { public AbstractRationalField(short tag, FieldType fieldType, int[] data) { super(tag, fieldType, data.length>>1); this.data = data; } public int[] getDataAsLong() { return data; } protected int writeData(RandomAccessOutputStream os, int toOffset) throws IOException { // dataOffset = toOffset; os.writeInt(toOffset); os.seek(toOffset); for (int value : data) os.writeInt(value); toOffset += (data.length << 2); return toOffset; } }
// ... existing code ... super(tag, fieldType, data.length>>1); this.data = data; } public int[] getDataAsLong() { return data; } protected int writeData(RandomAccessOutputStream os, int toOffset) throws IOException { // // ... rest of the code ...
2f03aa69f55d4d899af968e57a59a58d27ef82c8
url_shortener/forms.py
url_shortener/forms.py
from flask_wtf import Form from flask_wtf.recaptcha import RecaptchaField, Recaptcha from wtforms import StringField, validators from .validation import not_blacklisted_nor_spam class ShortenedURLForm(Form): url = StringField( validators=[ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ] ) recaptcha = RecaptchaField( validators=[ Recaptcha( 'Please click on the reCAPTCHA field to prove you are a human' ) ] )
from flask_wtf import Form from flask_wtf.recaptcha import RecaptchaField, Recaptcha from wtforms import StringField, validators from .validation import not_blacklisted_nor_spam class ShortenedURLForm(Form): url = StringField( validators=[ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ], render_kw={'placeholder': 'Original URL'} ) recaptcha = RecaptchaField( validators=[ Recaptcha( 'Please click on the reCAPTCHA field to prove you are a human' ) ] )
Add placeholder text to URL form field
Add placeholder text to URL form field
Python
mit
piotr-rusin/url-shortener,piotr-rusin/url-shortener
python
## Code Before: from flask_wtf import Form from flask_wtf.recaptcha import RecaptchaField, Recaptcha from wtforms import StringField, validators from .validation import not_blacklisted_nor_spam class ShortenedURLForm(Form): url = StringField( validators=[ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ] ) recaptcha = RecaptchaField( validators=[ Recaptcha( 'Please click on the reCAPTCHA field to prove you are a human' ) ] ) ## Instruction: Add placeholder text to URL form field ## Code After: from flask_wtf import Form from flask_wtf.recaptcha import RecaptchaField, Recaptcha from wtforms import StringField, validators from .validation import not_blacklisted_nor_spam class ShortenedURLForm(Form): url = StringField( validators=[ validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ], render_kw={'placeholder': 'Original URL'} ) recaptcha = RecaptchaField( validators=[ Recaptcha( 'Please click on the reCAPTCHA field to prove you are a human' ) ] )
... validators.DataRequired(), validators.URL(message='A valid URL is required'), not_blacklisted_nor_spam ], render_kw={'placeholder': 'Original URL'} ) recaptcha = RecaptchaField( validators=[ ...
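render_kw forwards extra HTML attributes straight to the rendered widget, which is how the placeholder ends up in the markup. A self-contained sketch (using plain wtforms.Form instead of Flask-WTF so no app context is needed; the field name and text are arbitrary):

```python
from wtforms import Form, StringField

class DemoForm(Form):
    url = StringField(render_kw={'placeholder': 'Original URL'})

form = DemoForm()
print(form.url())
# renders something like:
# <input id="url" name="url" placeholder="Original URL" type="text" value="">
```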
874ead2ed9de86eea20c4a67ce7b53cb2766c09e
erpnext/patches/v5_0/link_warehouse_with_account.py
erpnext/patches/v5_0/link_warehouse_with_account.py
from __future__ import unicode_literals import frappe def execute(): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
from __future__ import unicode_literals import frappe def execute(): if "master_name" in frappe.db.get_table_columns("Account"): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
Update warehouse as per master_name if master_name exists
Update warehouse as per master_name if master_name exists
Python
agpl-3.0
indictranstech/fbd_erpnext,gangadharkadam/saloon_erp_install,mbauskar/helpdesk-erpnext,gmarke/erpnext,Tejal011089/paypal_erpnext,Tejal011089/trufil-erpnext,treejames/erpnext,indictranstech/reciphergroup-erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,gangadharkadam/vlinkerp,hatwar/buyback-erpnext,shft117/SteckerApp,Drooids/erpnext,treejames/erpnext,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,gmarke/erpnext,shft117/SteckerApp,mbauskar/alec_frappe5_erpnext,indictranstech/reciphergroup-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/fbd_erpnext,Tejal011089/fbd_erpnext,sheafferusa/erpnext,mbauskar/alec_frappe5_erpnext,fuhongliang/erpnext,geekroot/erpnext,mahabuber/erpnext,hatwar/buyback-erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp,Tejal011089/osmosis_erpnext,mbauskar/Das_Erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/contributionerp,mbauskar/helpdesk-erpnext,meisterkleister/erpnext,indictranstech/fbd_erpnext,SPKian/Testing2,hanselke/erpnext-1,sheafferusa/erpnext,hatwar/Das_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-erpnext,anandpdoshi/erpnext,hatwar/buyback-erpnext,Tejal011089/osmosis_erpnext,susuchina/ERPNEXT,gangadharkadam/vlinkerp,mbauskar/helpdesk-erpnext,indictranstech/tele-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,tmimori/erpnext,Aptitudetech/ERPNext,netfirms/erpnext,gangadharkadam/contributionerp,netfirms/erpnext,rohitwaghchaure/GenieManager-erpnext,ShashaQin/erpnext,pombredanne/erpnext,SPKian/Testing,hanselke/erpnext-1,hernad/erpnext,mbauskar/sapphire-erpnext,hernad/erpnext,mahabuber/erpnext,anandpdoshi/erpnext,Tejal011089/osmosis_erpnext,hanselke/erpnext-1,susuchina/ERPNEXT,rohitwaghchaure/erpnext-receipher,indictranstech/reciphergroup-erpnext,MartinEnder/erpnext-de,Tejal011089/huntercamp_erpnext,ThiagoGarciaAlves/erpnext,shft117/SteckerApp,rohitwaghchaure/GenieManager-erpnext,SPKian/Testing2,shitolepriya/test-erp,saurabh6790/test-erp,Drooids/erpnext,njmube/erpnext,pombredanne/erpnext,gsnbng/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/v6_erp,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,ShashaQin/erpnext,gangadharkadam/saloon_erp_install,tmimori/erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/paypal_erpnext,indictranstech/erpnext,njmube/erpnext,mbauskar/Das_Erpnext,ThiagoGarciaAlves/erpnext,hatwar/Das_erpnext,gangadhar-kadam/helpdesk-erpnext,hernad/erpnext,rohitwaghchaure/GenieManager-erpnext,Drooids/erpnext,indictranstech/reciphergroup-erpnext,shitolepriya/test-erp,fuhongliang/erpnext,dieface/erpnext,indictranstech/osmosis-erpnext,gangadharkadam/contributionerp,Tejal011089/huntercamp_erpnext,mahabuber/erpnext,mbauskar/Das_Erpnext,gmarke/erpnext,indictranstech/tele-erpnext,saurabh6790/test-erp,pombredanne/erpnext,Suninus/erpnext,ShashaQin/erpnext,sheafferusa/erpnext,treejames/erpnext,SPKian/Testing,fuhongliang/erpnext,indictranstech/fbd_erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp_install,MartinEnder/erpnext-de,Suninus/erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/trufil-erpnext,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/erpnext,rohitwaghchaure/erpnext-receipher,mbauskar/helpdesk-erpnext,indictranstech/biggift-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v6_erp,gmarke/erpnext,gsnbng/erpnext,sagar30051991/ozsmart-erp,indictranstech/erpnext,geekroot/erpnext,susuchina/ERPNEXT,netfirms/erpnext,dieface/erpnext,SPKian/Testing,indictranstech/fbd_erpnext,treejames/erpnext,tmimori/erpnext,gangadharkadam/saloon_erp,indictranstech/tele-erpnext,hatwar/Das_erpnext,aru
izramon/alec_erpnext,mbauskar/sapphire-erpnext,ThiagoGarciaAlves/erpnext,mbauskar/omnitech-erpnext,Tejal011089/osmosis_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/osmosis-erpnext,fuhongliang/erpnext,gangadharkadam/v6_erp,gangadharkadam/vlinkerp,aruizramon/alec_erpnext,indictranstech/osmosis-erpnext,SPKian/Testing,sheafferusa/erpnext,gangadhar-kadam/helpdesk-erpnext,Tejal011089/trufil-erpnext,indictranstech/trufil-erpnext,anandpdoshi/erpnext,indictranstech/Das_Erpnext,gangadhar-kadam/helpdesk-erpnext,netfirms/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp_install,dieface/erpnext,aruizramon/alec_erpnext,shft117/SteckerApp,sagar30051991/ozsmart-erp,gangadharkadam/vlinkerp,saurabh6790/test-erp,Drooids/erpnext,indictranstech/biggift-erpnext,SPKian/Testing2,Suninus/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,mbauskar/sapphire-erpnext,Tejal011089/fbd_erpnext,ShashaQin/erpnext,shitolepriya/test-erp,njmube/erpnext,MartinEnder/erpnext-de,Tejal011089/paypal_erpnext,aruizramon/alec_erpnext,tmimori/erpnext,Tejal011089/fbd_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/helpdesk-erpnext,sagar30051991/ozsmart-erp,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,rohitwaghchaure/erpnext-receipher,meisterkleister/erpnext,hanselke/erpnext-1,gangadharkadam/saloon_erp,sagar30051991/ozsmart-erp,mbauskar/alec_frappe5_erpnext,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,geekroot/erpnext,njmube/erpnext,meisterkleister/erpnext,hatwar/Das_erpnext,hernad/erpnext,indictranstech/erpnext,gsnbng/erpnext,gangadharkadam/contributionerp,indictranstech/biggift-erpnext,meisterkleister/erpnext,gangadharkadam/v6_erp,shitolepriya/test-erp,geekroot/erpnext,Tejal011089/paypal_erpnext,dieface/erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/trufil-erpnext,rohitwaghchaure/erpnext-receipher
python
## Code Before: from __future__ import unicode_literals import frappe def execute(): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""") ## Instruction: Update warehouse as per master_name if master_name exists ## Code After: from __future__ import unicode_literals import frappe def execute(): if "master_name" in frappe.db.get_table_columns("Account"): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
... import frappe def execute(): if "master_name" in frappe.db.get_table_columns("Account"): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""") ...
b3ee386a185c0e97967506b17ff6555668716b02
src/qt/guiconstants.h
src/qt/guiconstants.h
/* Milliseconds between model updates */ static const int MODEL_UPDATE_DELAY = 500; /* Maximum passphrase length */ static const int MAX_PASSPHRASE_SIZE = 1024; /* Size of icons in status bar */ static const int STATUSBAR_ICONSIZE = 16; /* Invalid field background style */ #define STYLE_INVALID "background:#FF8080" /* Transaction list -- unconfirmed transaction */ #define COLOR_UNCONFIRMED QColor(128, 128, 128) /* Transaction list -- negative amount */ #define COLOR_NEGATIVE QColor(136, 0, 21) /* Table List -- negative amount */ #define COLOR_NEGATIVE_TABLE QColor(224, 0, 0) /* Transaction list -- positive amount */ #define COLOR_POSITIVE QColor(0, 128, 0) /* Transaction list -- bare address (without label) */ #define COLOR_BAREADDRESS QColor(140, 140, 140) /* Colors for minting tab for each coin age group */ #define COLOR_MINT_YOUNG QColor(127, 127, 240) #define COLOR_MINT_MATURE QColor(127, 240, 127) #define COLOR_MINT_OLD QColor(240, 127, 127) #endif // GUICONSTANTS_H
/* Milliseconds between model updates */ static const int MODEL_UPDATE_DELAY = 500; /* Maximum passphrase length */ static const int MAX_PASSPHRASE_SIZE = 1024; /* Size of icons in status bar */ static const int STATUSBAR_ICONSIZE = 16; /* Invalid field background style */ #define STYLE_INVALID "background:#FF8080" /* Transaction list -- unconfirmed transaction */ #define COLOR_UNCONFIRMED QColor(128, 128, 128) /* Transaction list -- negative amount */ #define COLOR_NEGATIVE QColor(136, 0, 21) /* Table List -- negative amount */ #define COLOR_NEGATIVE_TABLE QColor(255, 80, 80) /* Transaction list -- positive amount */ #define COLOR_POSITIVE QColor(0, 128, 0) /* Transaction list -- bare address (without label) */ #define COLOR_BAREADDRESS QColor(140, 140, 140) /* Colors for minting tab for each coin age group */ #define COLOR_MINT_YOUNG QColor(128, 64, 64) #define COLOR_MINT_MATURE QColor(64, 128, 64) #define COLOR_MINT_OLD QColor(26, 77, 26) #endif // GUICONSTANTS_H
Improve colors of minting tab and transactions tab.
Improve colors of minting tab and transactions tab.
C
mit
Peerunity/Peerunity,Peerunity/Peerunity,Peerunity/Peerunity,Peerunity/Peerunity
c
## Code Before: /* Milliseconds between model updates */ static const int MODEL_UPDATE_DELAY = 500; /* Maximum passphrase length */ static const int MAX_PASSPHRASE_SIZE = 1024; /* Size of icons in status bar */ static const int STATUSBAR_ICONSIZE = 16; /* Invalid field background style */ #define STYLE_INVALID "background:#FF8080" /* Transaction list -- unconfirmed transaction */ #define COLOR_UNCONFIRMED QColor(128, 128, 128) /* Transaction list -- negative amount */ #define COLOR_NEGATIVE QColor(136, 0, 21) /* Table List -- negative amount */ #define COLOR_NEGATIVE_TABLE QColor(224, 0, 0) /* Transaction list -- positive amount */ #define COLOR_POSITIVE QColor(0, 128, 0) /* Transaction list -- bare address (without label) */ #define COLOR_BAREADDRESS QColor(140, 140, 140) /* Colors for minting tab for each coin age group */ #define COLOR_MINT_YOUNG QColor(127, 127, 240) #define COLOR_MINT_MATURE QColor(127, 240, 127) #define COLOR_MINT_OLD QColor(240, 127, 127) #endif // GUICONSTANTS_H ## Instruction: Improve colors of minting tab and transactions tab. ## Code After: /* Milliseconds between model updates */ static const int MODEL_UPDATE_DELAY = 500; /* Maximum passphrase length */ static const int MAX_PASSPHRASE_SIZE = 1024; /* Size of icons in status bar */ static const int STATUSBAR_ICONSIZE = 16; /* Invalid field background style */ #define STYLE_INVALID "background:#FF8080" /* Transaction list -- unconfirmed transaction */ #define COLOR_UNCONFIRMED QColor(128, 128, 128) /* Transaction list -- negative amount */ #define COLOR_NEGATIVE QColor(136, 0, 21) /* Table List -- negative amount */ #define COLOR_NEGATIVE_TABLE QColor(255, 80, 80) /* Transaction list -- positive amount */ #define COLOR_POSITIVE QColor(0, 128, 0) /* Transaction list -- bare address (without label) */ #define COLOR_BAREADDRESS QColor(140, 140, 140) /* Colors for minting tab for each coin age group */ #define COLOR_MINT_YOUNG QColor(128, 64, 64) #define COLOR_MINT_MATURE QColor(64, 128, 64) #define COLOR_MINT_OLD QColor(26, 77, 26) #endif // GUICONSTANTS_H
// ... existing code ... /* Transaction list -- negative amount */ #define COLOR_NEGATIVE QColor(136, 0, 21) /* Table List -- negative amount */ #define COLOR_NEGATIVE_TABLE QColor(255, 80, 80) /* Transaction list -- positive amount */ #define COLOR_POSITIVE QColor(0, 128, 0) /* Transaction list -- bare address (without label) */ // ... modified code ... #define COLOR_BAREADDRESS QColor(140, 140, 140) /* Colors for minting tab for each coin age group */ #define COLOR_MINT_YOUNG QColor(128, 64, 64) #define COLOR_MINT_MATURE QColor(64, 128, 64) #define COLOR_MINT_OLD QColor(26, 77, 26) #endif // GUICONSTANTS_H // ... rest of the code ...
d850f4785340f73a417653f46c4de275a6eeeb8c
utilities/ticker-update.py
utilities/ticker-update.py
import requests from bs4 import BeautifulSoup URL = 'https://finance.yahoo.com/quote/' securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't'] for security in securities: query = URL + security page = requests.get(query) soup = BeautifulSoup(page.content, 'html.parser') span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"}) price = span.get_text() table_row = soup.select('table td') open = table_row[3].text print(f"{security:>6}: {open:<6} {price:<6}")
import requests from bs4 import BeautifulSoup URL = 'https://finance.yahoo.com/quote/' secutities = [] with open("ticker-updates,cong", r) as conf_file: securities = conf_file.readlines() securities = [s.strip() for s in securities] for security in securities: query = URL + security page = requests.get(query) soup = BeautifulSoup(page.content, 'html.parser') span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"}) price = span.get_text() table_row = soup.select('table td') open = table_row[3].text print(f"{security:>6}: {open:<6} {price:<6}")
Read securities from conf file
Read securities from conf file
Python
mit
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
python
## Code Before: import requests from bs4 import BeautifulSoup URL = 'https://finance.yahoo.com/quote/' securities = ['bgcp', 'cvx', 'f', 'ge', 'intc', 'lumn', 'src', 't'] for security in securities: query = URL + security page = requests.get(query) soup = BeautifulSoup(page.content, 'html.parser') span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"}) price = span.get_text() table_row = soup.select('table td') open = table_row[3].text print(f"{security:>6}: {open:<6} {price:<6}") ## Instruction: Read securities from conf file ## Code After: import requests from bs4 import BeautifulSoup URL = 'https://finance.yahoo.com/quote/' secutities = [] with open("ticker-updates,cong", r) as conf_file: securities = conf_file.readlines() securities = [s.strip() for s in securities] for security in securities: query = URL + security page = requests.get(query) soup = BeautifulSoup(page.content, 'html.parser') span = soup.find('span', {'class': "Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)"}) price = span.get_text() table_row = soup.select('table td') open = table_row[3].text print(f"{security:>6}: {open:<6} {price:<6}")
// ... existing code ... from bs4 import BeautifulSoup URL = 'https://finance.yahoo.com/quote/' secutities = [] with open("ticker-updates,cong", r) as conf_file: securities = conf_file.readlines() securities = [s.strip() for s in securities] for security in securities: query = URL + security // ... rest of the code ...
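A self-contained way to express the "read tickers from a config file" idea, with whitespace stripped and blank lines ignored, is sketched below. The file name, helper name and sample symbols are assumptions for illustration only, not taken from the repository.

```python
def load_securities(path="tickers.conf"):
    """Return one ticker symbol per non-blank line of the config file."""
    with open(path, encoding="utf-8") as conf_file:
        return [line.strip() for line in conf_file if line.strip()]

# With a tickers.conf containing "cvx", "ge" and "intc" on separate lines,
# load_securities() returns ['cvx', 'ge', 'intc'].
```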
d2130b64c63bdcfdea854db39fb21c7efe0b24e1
tests/test_httpheader.py
tests/test_httpheader.py
import pytest pytestmark = pytest.mark.asyncio async def test_redirection(get_version): assert await get_version("jmeter-plugins-manager", { "source": "httpheader", "url": "https://www.unifiedremote.com/download/linux-x64-deb", "regex": r'urserver-([\d.]+).deb', }) != None
import pytest pytestmark = pytest.mark.asyncio async def test_redirection(get_version): assert await get_version("unifiedremote", { "source": "httpheader", "url": "https://www.unifiedremote.com/download/linux-x64-deb", "regex": r'urserver-([\d.]+).deb', }) != None
Correct package name in httpheader test
Correct package name in httpheader test
Python
mit
lilydjwg/nvchecker
python
## Code Before: import pytest pytestmark = pytest.mark.asyncio async def test_redirection(get_version): assert await get_version("jmeter-plugins-manager", { "source": "httpheader", "url": "https://www.unifiedremote.com/download/linux-x64-deb", "regex": r'urserver-([\d.]+).deb', }) != None ## Instruction: Correct package name in httpheader test ## Code After: import pytest pytestmark = pytest.mark.asyncio async def test_redirection(get_version): assert await get_version("unifiedremote", { "source": "httpheader", "url": "https://www.unifiedremote.com/download/linux-x64-deb", "regex": r'urserver-([\d.]+).deb', }) != None
# ... existing code ... pytestmark = pytest.mark.asyncio async def test_redirection(get_version): assert await get_version("unifiedremote", { "source": "httpheader", "url": "https://www.unifiedremote.com/download/linux-x64-deb", "regex": r'urserver-([\d.]+).deb', # ... rest of the code ...
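What the test's regex actually captures from the redirected download URL can be checked in isolation. The example filename below is invented, but the pattern is the one used in the test:

```python
import re

url = "https://example.com/files/urserver-3.11.0.deb"  # hypothetical redirect target
match = re.search(r'urserver-([\d.]+).deb', url)
print(match.group(1))  # -> 3.11.0
```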
3fc94b4cffcfd08b439386fb2b01aa1e12fec6d5
iati/core/tests/test_data.py
iati/core/tests/test_data.py
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" pass
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass
Test stubs for dataset creation
Test stubs for dataset creation
Python
mit
IATI/iati.core,IATI/iati.core
python
## Code Before: """A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" pass ## Instruction: Test stubs for dataset creation ## Code After: """A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass
... class TestDatasets(object): """A container for tests relating to Datasets""" def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass ...
b2e0cfe1f877507295abc574b128a0ccfe5d7f2b
annotation-rest/src/main/java/uk/ac/ebi/quickgo/annotation/download/converter/helpers/DateConverter.java
annotation-rest/src/main/java/uk/ac/ebi/quickgo/annotation/download/converter/helpers/DateConverter.java
package uk.ac.ebi.quickgo.annotation.download.converter.helpers; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.function.Function; /** * A home for the logic to format dates into Strings. * * @author Tony Wardell * Date: 09/04/2018 * Time: 14:54 * Created with IntelliJ IDEA. */ public class DateConverter { private DateConverter() { } private static final DateTimeFormatter YYYYMMDD_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd"); public static final Function<java.util.Date, String> toYYYYMMDD = d -> d.toInstant().atZone(ZoneId.systemDefault()).format(YYYYMMDD_DATE_FORMAT); public static String toYearMonthDay(Date date) { return toYYYYMMDD.apply(date); } }
package uk.ac.ebi.quickgo.annotation.download.converter.helpers; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.function.Function; /** * A home for the logic to format dates into Strings. * * @author Tony Wardell * Date: 09/04/2018 * Time: 14:54 * Created with IntelliJ IDEA. */ public class DateConverter { private DateConverter() { } private static final DateTimeFormatter YYYYMMDD_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd"); public static final Function<java.util.Date, String> toYYYYMMDD = d -> d.toInstant().atZone(ZoneId.systemDefault()).format(YYYYMMDD_DATE_FORMAT); public static String toYearMonthDay(Date date) { return date != null ? toYYYYMMDD.apply(date) : ""; } }
Add test for null date.
Add test for null date.
Java
apache-2.0
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
java
## Code Before: package uk.ac.ebi.quickgo.annotation.download.converter.helpers; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.function.Function; /** * A home for the logic to format dates into Strings. * * @author Tony Wardell * Date: 09/04/2018 * Time: 14:54 * Created with IntelliJ IDEA. */ public class DateConverter { private DateConverter() { } private static final DateTimeFormatter YYYYMMDD_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd"); public static final Function<java.util.Date, String> toYYYYMMDD = d -> d.toInstant().atZone(ZoneId.systemDefault()).format(YYYYMMDD_DATE_FORMAT); public static String toYearMonthDay(Date date) { return toYYYYMMDD.apply(date); } } ## Instruction: Add test for null date. ## Code After: package uk.ac.ebi.quickgo.annotation.download.converter.helpers; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.function.Function; /** * A home for the logic to format dates into Strings. * * @author Tony Wardell * Date: 09/04/2018 * Time: 14:54 * Created with IntelliJ IDEA. */ public class DateConverter { private DateConverter() { } private static final DateTimeFormatter YYYYMMDD_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd"); public static final Function<java.util.Date, String> toYYYYMMDD = d -> d.toInstant().atZone(ZoneId.systemDefault()).format(YYYYMMDD_DATE_FORMAT); public static String toYearMonthDay(Date date) { return date != null ? toYYYYMMDD.apply(date) : ""; } }
// ... existing code ... d -> d.toInstant().atZone(ZoneId.systemDefault()).format(YYYYMMDD_DATE_FORMAT); public static String toYearMonthDay(Date date) { return date != null ? toYYYYMMDD.apply(date) : ""; } } // ... rest of the code ...
5ac675b36c7c7ba9110b6b16e11a56f554ff8c8e
signbank/video/urls.py
signbank/video/urls.py
from django.conf.urls import * urlpatterns = patterns('', (r'^video/(?P<videoid>.*)$', 'signbank.video.views.video'), (r'^upload/', 'signbank.video.views.addvideo'), (r'^delete/(?P<videoid>.*)$', 'signbank.video.views.deletevideo'), (r'^poster/(?P<videoid>.*)$', 'signbank.video.views.poster'), (r'^iframe/(?P<videoid>.*)$', 'signbank.video.views.iframe'), )
from django.conf.urls import * urlpatterns = patterns('', (r'^video/(?P<videoid>\d+)$', 'signbank.video.views.video'), (r'^upload/', 'signbank.video.views.addvideo'), (r'^delete/(?P<videoid>\d+)$', 'signbank.video.views.deletevideo'), (r'^poster/(?P<videoid>\d+)$', 'signbank.video.views.poster'), (r'^iframe/(?P<videoid>\d+)$', 'signbank.video.views.iframe'), )
Use more explicit pattern for video id in URL to prevent matching trailing slash.
Use more explicit pattern for video id in URL to prevent matching trailing slash.
Python
bsd-3-clause
Signbank/Auslan-signbank,Signbank/Auslan-signbank,Signbank/BSL-signbank,Signbank/BSL-signbank,Signbank/BSL-signbank,Signbank/Auslan-signbank,Signbank/BSL-signbank,Signbank/Auslan-signbank
python
## Code Before: from django.conf.urls import * urlpatterns = patterns('', (r'^video/(?P<videoid>.*)$', 'signbank.video.views.video'), (r'^upload/', 'signbank.video.views.addvideo'), (r'^delete/(?P<videoid>.*)$', 'signbank.video.views.deletevideo'), (r'^poster/(?P<videoid>.*)$', 'signbank.video.views.poster'), (r'^iframe/(?P<videoid>.*)$', 'signbank.video.views.iframe'), ) ## Instruction: Use more explicit pattern for video id in URL to prevent matching trailing slash. ## Code After: from django.conf.urls import * urlpatterns = patterns('', (r'^video/(?P<videoid>\d+)$', 'signbank.video.views.video'), (r'^upload/', 'signbank.video.views.addvideo'), (r'^delete/(?P<videoid>\d+)$', 'signbank.video.views.deletevideo'), (r'^poster/(?P<videoid>\d+)$', 'signbank.video.views.poster'), (r'^iframe/(?P<videoid>\d+)$', 'signbank.video.views.iframe'), )
... urlpatterns = patterns('', (r'^video/(?P<videoid>\d+)$', 'signbank.video.views.video'), (r'^upload/', 'signbank.video.views.addvideo'), (r'^delete/(?P<videoid>\d+)$', 'signbank.video.views.deletevideo'), (r'^poster/(?P<videoid>\d+)$', 'signbank.video.views.poster'), (r'^iframe/(?P<videoid>\d+)$', 'signbank.video.views.iframe'), ) ...
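The difference between the two patterns is easy to demonstrate with re on its own: '.*' lets a trailing slash (or anything else) leak into the captured videoid, while r'\d+' refuses to match it at all. A standalone comparison with an invented request path:

```python
import re

path = "video/42/"

loose = re.match(r'^video/(?P<videoid>.*)$', path)
strict = re.match(r'^video/(?P<videoid>\d+)$', path)

print(loose.group('videoid'))  # '42/'  (the slash becomes part of the id)
print(strict)                  # None   (the trailing slash no longer matches)
```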
13bdfcd1c659359289f373052b21fb9f998dcc0b
test/Preprocessor/microsoft-import.c
test/Preprocessor/microsoft-import.c
// RUN: %clang_cc1 -E -fms-compatibility %s 2>&1 | grep 'doh.c:100:2: error: #import of type library is an unsupported Microsoft feature' // RUN: %clang_cc1 -E -fms-compatibility %s 2>&1 | grep 'doh.c:200:2: error: #import of type library is an unsupported Microsoft feature' // RUN: %clang_cc1 -E -fms-compatibility %s 2>&1 | grep 'doh.c:300:2: error: #import of type library is an unsupported Microsoft feature' #line 100 "doh.c" #import "pp-record.h" // expected-error {{#import of type library is an unsupported Microsoft feature}} // Test attributes #line 200 "doh.c" #import "pp-record.h" no_namespace, auto_rename // expected-error {{#import of type library is an unsupported Microsoft feature}} // This will also fire the "#import of type library is an unsupported Microsoft feature" // error, but we can't use -verify because there's no way to put the comment on the proper line #line 300 "doh.c" #import "pp-record.h" no_namespace \ auto_rename \ auto_search
// RUN: %clang_cc1 -E -verify -fms-compatibility %s #import "pp-record.h" // expected-error {{#import of type library is an unsupported Microsoft feature}} // Test attributes #import "pp-record.h" no_namespace, auto_rename // expected-error {{#import of type library is an unsupported Microsoft feature}} #import "pp-record.h" no_namespace \ auto_rename \ auto_search // expected-error@-3 {{#import of type library is an unsupported Microsoft feature}}
Migrate a test to -verify
Migrate a test to -verify git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@173716 91177308-0d34-0410-b5e6-96231b3b80d8
C
apache-2.0
llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang
c
## Code Before: // RUN: %clang_cc1 -E -fms-compatibility %s 2>&1 | grep 'doh.c:100:2: error: #import of type library is an unsupported Microsoft feature' // RUN: %clang_cc1 -E -fms-compatibility %s 2>&1 | grep 'doh.c:200:2: error: #import of type library is an unsupported Microsoft feature' // RUN: %clang_cc1 -E -fms-compatibility %s 2>&1 | grep 'doh.c:300:2: error: #import of type library is an unsupported Microsoft feature' #line 100 "doh.c" #import "pp-record.h" // expected-error {{#import of type library is an unsupported Microsoft feature}} // Test attributes #line 200 "doh.c" #import "pp-record.h" no_namespace, auto_rename // expected-error {{#import of type library is an unsupported Microsoft feature}} // This will also fire the "#import of type library is an unsupported Microsoft feature" // error, but we can't use -verify because there's no way to put the comment on the proper line #line 300 "doh.c" #import "pp-record.h" no_namespace \ auto_rename \ auto_search ## Instruction: Migrate a test to -verify git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@173716 91177308-0d34-0410-b5e6-96231b3b80d8 ## Code After: // RUN: %clang_cc1 -E -verify -fms-compatibility %s #import "pp-record.h" // expected-error {{#import of type library is an unsupported Microsoft feature}} // Test attributes #import "pp-record.h" no_namespace, auto_rename // expected-error {{#import of type library is an unsupported Microsoft feature}} #import "pp-record.h" no_namespace \ auto_rename \ auto_search // expected-error@-3 {{#import of type library is an unsupported Microsoft feature}}
... // RUN: %clang_cc1 -E -verify -fms-compatibility %s #import "pp-record.h" // expected-error {{#import of type library is an unsupported Microsoft feature}} // Test attributes #import "pp-record.h" no_namespace, auto_rename // expected-error {{#import of type library is an unsupported Microsoft feature}} #import "pp-record.h" no_namespace \ auto_rename \ auto_search // expected-error@-3 {{#import of type library is an unsupported Microsoft feature}} ...
57280453c222dddff6433e234608e89684e79c93
test_board_pytest.py
test_board_pytest.py
from board import Board def test_constructor(): board = Board(0,0) assert board.boardMatrix.size == 0 assert board.columns == 0 assert board.rows == 0 board = Board(5,5) assert board.boardMatrix.size == 25 assert board.columns == 5 assert board.rows == 5 def test_addPiece(): board = Board(5,5) board.addPiece(0, 1) assert board.boardMatrix.item((4,0)) == 1
from board import Board def test_constructor(): board = Board(0,0) assert board.boardMatrix.size == 0 assert board.columns == 0 assert board.rows == 0 board = Board(5,5) assert board.boardMatrix.size == 25 assert board.columns == 5 assert board.rows == 5 def test_addPiece(): board = Board(5,5) assert board.addPiece(0, 1) == True assert board.boardMatrix.item((4,0)) == 1 assert board.addPiece(0, 1) == True assert board.boardMatrix.item((3,0)) == 1 assert board.addPiece(1, 1) == True assert board.boardMatrix.item((4,1)) == 1 assert board.addPiece(4, 1) == True assert board.boardMatrix.item((4,4)) == 1
Add more tests for addPiece method.
Add more tests for addPiece method.
Python
mit
isaacarvestad/four-in-a-row
python
## Code Before: from board import Board def test_constructor(): board = Board(0,0) assert board.boardMatrix.size == 0 assert board.columns == 0 assert board.rows == 0 board = Board(5,5) assert board.boardMatrix.size == 25 assert board.columns == 5 assert board.rows == 5 def test_addPiece(): board = Board(5,5) board.addPiece(0, 1) assert board.boardMatrix.item((4,0)) == 1 ## Instruction: Add more tests for addPiece method. ## Code After: from board import Board def test_constructor(): board = Board(0,0) assert board.boardMatrix.size == 0 assert board.columns == 0 assert board.rows == 0 board = Board(5,5) assert board.boardMatrix.size == 25 assert board.columns == 5 assert board.rows == 5 def test_addPiece(): board = Board(5,5) assert board.addPiece(0, 1) == True assert board.boardMatrix.item((4,0)) == 1 assert board.addPiece(0, 1) == True assert board.boardMatrix.item((3,0)) == 1 assert board.addPiece(1, 1) == True assert board.boardMatrix.item((4,1)) == 1 assert board.addPiece(4, 1) == True assert board.boardMatrix.item((4,4)) == 1
... def test_addPiece(): board = Board(5,5) assert board.addPiece(0, 1) == True assert board.boardMatrix.item((4,0)) == 1 assert board.addPiece(0, 1) == True assert board.boardMatrix.item((3,0)) == 1 assert board.addPiece(1, 1) == True assert board.boardMatrix.item((4,1)) == 1 assert board.addPiece(4, 1) == True assert board.boardMatrix.item((4,4)) == 1 ...
4f98c8ff8ef724b65106a040ffaf67800dff1611
animations.py
animations.py
def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animator
def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
Remove some debugging print statements that slipped through.
Remove some debugging print statements that slipped through.
Python
lgpl-2.1
platipy/spyral
python
## Code Before: def Linear(start, finish): def linear_animator(dt): print 'Linear Animator' print start, finish, dt, (finish-start)*(dt)+start return (finish-start)*(dt)+start return linear_animator ## Instruction: Remove some debugging print statements that slipped through. ## Code After: def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator
... def Linear(start, finish): def linear_animator(dt): return (finish-start)*(dt)+start return linear_animator ...
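For reference, the closure returned by Linear is plain linear interpolation over the normalised time dt. A tiny usage sketch, assuming the module above is importable as animations:

```python
from animations import Linear  # the module shown in this record

animate = Linear(0.0, 100.0)
print(animate(0.0))   # 0.0, the start value
print(animate(0.5))   # 50.0, halfway between start and finish
print(animate(1.0))   # 100.0, the finish value
```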
fab561da9c54e278e7762380bf043a2fe03e39da
xerox/darwin.py
xerox/darwin.py
import subprocess import commands from .base import * def copy(string): """Copy given string into system clipboard.""" try: subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string))) except OSError as why: raise XcodeNotFound return def paste(): """Returns system clipboard contents.""" try: return unicode(commands.getoutput('pbpaste')) except OSError as why: raise XcodeNotFound
import subprocess from .base import * def copy(string): """Copy given string into system clipboard.""" try: subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string))) except OSError as why: raise XcodeNotFound return def paste(): """Returns system clipboard contents.""" try: return unicode(subprocess.check_output('pbpaste')) except OSError as why: raise XcodeNotFound
Use `subprocess.check_output` rather than `commands.getoutput`.
Use `subprocess.check_output` rather than `commands.getoutput`. `commands` is deprecated.
Python
mit
solarce/xerox,kennethreitz/xerox
python
## Code Before: import subprocess import commands from .base import * def copy(string): """Copy given string into system clipboard.""" try: subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string))) except OSError as why: raise XcodeNotFound return def paste(): """Returns system clipboard contents.""" try: return unicode(commands.getoutput('pbpaste')) except OSError as why: raise XcodeNotFound ## Instruction: Use `subprocess.check_output` rather than `commands.getoutput`. `commands` is deprecated. ## Code After: import subprocess from .base import * def copy(string): """Copy given string into system clipboard.""" try: subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string))) except OSError as why: raise XcodeNotFound return def paste(): """Returns system clipboard contents.""" try: return unicode(subprocess.check_output('pbpaste')) except OSError as why: raise XcodeNotFound
// ... existing code ... import subprocess from .base import * // ... modified code ... def paste(): """Returns system clipboard contents.""" try: return unicode(subprocess.check_output('pbpaste')) except OSError as why: raise XcodeNotFound // ... rest of the code ...
a1bc7ad8adbcce9ff5fa64af5b5d1ee1c1891d2b
cloudfoundry-client/src/main/java/org/cloudfoundry/client/v3/packages/_DockerData.java
cloudfoundry-client/src/main/java/org/cloudfoundry/client/v3/packages/_DockerData.java
/* * Copyright 2013-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.v3.packages; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.immutables.value.Value; /** * Data type for docker packages */ @JsonDeserialize @Value.Immutable abstract class _DockerData implements PackageData { /** * The Docker image */ @JsonProperty("image") abstract String getImage(); /** * The password for the image's registry */ @JsonProperty("password") abstract String getPassword(); /** * The username for the image's registry */ @JsonProperty("username") abstract String getUsername(); }
/* * Copyright 2013-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.v3.packages; import org.cloudfoundry.Nullable; import org.immutables.value.Value; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Data type for docker packages */ @JsonDeserialize @Value.Immutable abstract class _DockerData implements PackageData { /** * The Docker image */ @JsonProperty("image") abstract String getImage(); /** * The password for the image's registry */ @JsonProperty("password") @Nullable abstract String getPassword(); /** * The username for the image's registry */ @JsonProperty("username") @Nullable abstract String getUsername(); }
Make docker credentials Nullable to match API definition
Make docker credentials Nullable to match API definition
Java
apache-2.0
cloudfoundry/cf-java-client,cloudfoundry/cf-java-client,cloudfoundry/cf-java-client
java
## Code Before: /* * Copyright 2013-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.v3.packages; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.immutables.value.Value; /** * Data type for docker packages */ @JsonDeserialize @Value.Immutable abstract class _DockerData implements PackageData { /** * The Docker image */ @JsonProperty("image") abstract String getImage(); /** * The password for the image's registry */ @JsonProperty("password") abstract String getPassword(); /** * The username for the image's registry */ @JsonProperty("username") abstract String getUsername(); } ## Instruction: Make docker credentials Nullable to match API definition ## Code After: /* * Copyright 2013-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.v3.packages; import org.cloudfoundry.Nullable; import org.immutables.value.Value; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Data type for docker packages */ @JsonDeserialize @Value.Immutable abstract class _DockerData implements PackageData { /** * The Docker image */ @JsonProperty("image") abstract String getImage(); /** * The password for the image's registry */ @JsonProperty("password") @Nullable abstract String getPassword(); /** * The username for the image's registry */ @JsonProperty("username") @Nullable abstract String getUsername(); }
# ... existing code ... package org.cloudfoundry.client.v3.packages; import org.cloudfoundry.Nullable; import org.immutables.value.Value; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; /** * Data type for docker packages # ... modified code ... * The password for the image's registry */ @JsonProperty("password") @Nullable abstract String getPassword(); /** ... * The username for the image's registry */ @JsonProperty("username") @Nullable abstract String getUsername(); } # ... rest of the code ...
df1397dcf6fe849b87db139e8ea3087a5f73649a
tests/graphics/toolbuttons.py
tests/graphics/toolbuttons.py
from gi.repository import Gtk from sugar3.graphics.toolbarbox import ToolbarBox from sugar3.graphics.colorbutton import ColorToolButton from sugar3.graphics.radiotoolbutton import RadioToolButton from sugar3.graphics.toggletoolbutton import ToggleToolButton import common test = common.Test() test.show() vbox = Gtk.VBox() test.pack_start(vbox, True, True, 0) vbox.show() toolbar_box = ToolbarBox() vbox.pack_start(toolbar_box, False, False, 0) toolbar_box.show() radial_button = RadioToolButton(named_icon='view-radial') toolbar_box.toolbar.insert(radial_button, -1) radial_button.show() list_button = RadioToolButton(named_icon='view-list') list_button.props.group = radial_button toolbar_box.toolbar.insert(list_button, -1) list_button.show() separator = Gtk.SeparatorToolItem() toolbar_box.toolbar.insert(separator, -1) separator.show() color_button = ColorToolButton() toolbar_box.toolbar.insert(color_button, -1) color_button.show() favorite_button = ToggleToolButton('emblem-favorite') toolbar_box.toolbar.insert(favorite_button, -1) favorite_button.show() if __name__ == '__main__': common.main(test)
from gi.repository import Gtk from sugar3.graphics.toolbarbox import ToolbarBox from sugar3.graphics.colorbutton import ColorToolButton from sugar3.graphics.radiotoolbutton import RadioToolButton from sugar3.graphics.toggletoolbutton import ToggleToolButton import common test = common.Test() test.show() vbox = Gtk.VBox() test.pack_start(vbox, True, True, 0) vbox.show() toolbar_box = ToolbarBox() vbox.pack_start(toolbar_box, False, False, 0) toolbar_box.show() radial_button = RadioToolButton(icon_name='view-radial') toolbar_box.toolbar.insert(radial_button, -1) radial_button.show() list_button = RadioToolButton(icon_name='view-list') list_button.props.group = radial_button toolbar_box.toolbar.insert(list_button, -1) list_button.show() separator = Gtk.SeparatorToolItem() toolbar_box.toolbar.insert(separator, -1) separator.show() color_button = ColorToolButton() toolbar_box.toolbar.insert(color_button, -1) color_button.show() favorite_button = ToggleToolButton('emblem-favorite') toolbar_box.toolbar.insert(favorite_button, -1) favorite_button.show() if __name__ == '__main__': common.main(test)
Update toolbar buttons testcase with API change for the icon name
Update toolbar buttons testcase with API change for the icon name Follow up of fe11a3aa23c0e7fbc3c0c498e147b0a20348cc12 . Signed-off-by: Manuel Quiñones <[email protected]>
Python
lgpl-2.1
i5o/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3
python
## Code Before: from gi.repository import Gtk from sugar3.graphics.toolbarbox import ToolbarBox from sugar3.graphics.colorbutton import ColorToolButton from sugar3.graphics.radiotoolbutton import RadioToolButton from sugar3.graphics.toggletoolbutton import ToggleToolButton import common test = common.Test() test.show() vbox = Gtk.VBox() test.pack_start(vbox, True, True, 0) vbox.show() toolbar_box = ToolbarBox() vbox.pack_start(toolbar_box, False, False, 0) toolbar_box.show() radial_button = RadioToolButton(named_icon='view-radial') toolbar_box.toolbar.insert(radial_button, -1) radial_button.show() list_button = RadioToolButton(named_icon='view-list') list_button.props.group = radial_button toolbar_box.toolbar.insert(list_button, -1) list_button.show() separator = Gtk.SeparatorToolItem() toolbar_box.toolbar.insert(separator, -1) separator.show() color_button = ColorToolButton() toolbar_box.toolbar.insert(color_button, -1) color_button.show() favorite_button = ToggleToolButton('emblem-favorite') toolbar_box.toolbar.insert(favorite_button, -1) favorite_button.show() if __name__ == '__main__': common.main(test) ## Instruction: Update toolbar buttons testcase with API change for the icon name Follow up of fe11a3aa23c0e7fbc3c0c498e147b0a20348cc12 . Signed-off-by: Manuel Quiñones <[email protected]> ## Code After: from gi.repository import Gtk from sugar3.graphics.toolbarbox import ToolbarBox from sugar3.graphics.colorbutton import ColorToolButton from sugar3.graphics.radiotoolbutton import RadioToolButton from sugar3.graphics.toggletoolbutton import ToggleToolButton import common test = common.Test() test.show() vbox = Gtk.VBox() test.pack_start(vbox, True, True, 0) vbox.show() toolbar_box = ToolbarBox() vbox.pack_start(toolbar_box, False, False, 0) toolbar_box.show() radial_button = RadioToolButton(icon_name='view-radial') toolbar_box.toolbar.insert(radial_button, -1) radial_button.show() list_button = RadioToolButton(icon_name='view-list') list_button.props.group = radial_button toolbar_box.toolbar.insert(list_button, -1) list_button.show() separator = Gtk.SeparatorToolItem() toolbar_box.toolbar.insert(separator, -1) separator.show() color_button = ColorToolButton() toolbar_box.toolbar.insert(color_button, -1) color_button.show() favorite_button = ToggleToolButton('emblem-favorite') toolbar_box.toolbar.insert(favorite_button, -1) favorite_button.show() if __name__ == '__main__': common.main(test)
... vbox.pack_start(toolbar_box, False, False, 0) toolbar_box.show() radial_button = RadioToolButton(icon_name='view-radial') toolbar_box.toolbar.insert(radial_button, -1) radial_button.show() list_button = RadioToolButton(icon_name='view-list') list_button.props.group = radial_button toolbar_box.toolbar.insert(list_button, -1) list_button.show() ...
25351cd6b9119ea27123a2fddbbcc274c3620886
examples/examples.py
examples/examples.py
from __future__ import print_function, division import numpy as np import matplotlib.pylab as plt import seaborn as sns from multidensity import MultiDensity from skewstudent import SkewStudent def estimate_bivariate_mle(): ndim = 2 size = (1000, ndim) data = np.random.normal(size=size) eta, lam = 4, -.9 skst = SkewStudent(eta=eta, lam=lam) data = skst.rvs(size=size) out = MultiDensity.fit_mle(data=data) print(out) mdens = MultiDensity() mdens.from_theta(out.x) fig, axes = plt.subplots(nrows=size[1], ncols=1) for innov, ax in zip(data.T, axes): sns.kdeplot(innov, ax=ax) lines = [ax.get_lines()[0].get_xdata() for ax in axes] lines = np.vstack(lines).T marginals = mdens.marginals(lines) for line, margin, ax in zip(lines.T, marginals.T, axes): ax.plot(line, margin) plt.show() if __name__ == '__main__': estimate_bivariate_mle()
from __future__ import print_function, division import numpy as np import matplotlib.pylab as plt import seaborn as sns from multidensity import MultiDensity from skewstudent import SkewStudent def estimate_bivariate_mle(): ndim = 2 size = (1000, ndim) data = np.random.normal(size=size) eta, lam = 4, -.9 skst = SkewStudent(eta=eta, lam=lam) data = skst.rvs(size=size) out = MultiDensity.fit_mle(data=data) print(out) mdens = MultiDensity() mdens.from_theta(out.x) fig, axes = plt.subplots(nrows=size[1], ncols=1) for innov, ax in zip(data.T, axes): sns.kdeplot(innov, ax=ax, label='data') lines = [ax.get_lines()[0].get_xdata() for ax in axes] lines = np.vstack(lines).T marginals = mdens.marginals(lines) for line, margin, ax in zip(lines.T, marginals.T, axes): ax.plot(line, margin, label='fitted') ax.legend() plt.show() if __name__ == '__main__': estimate_bivariate_mle()
Add plot legend in the example
Add plot legend in the example
Python
mit
khrapovs/multidensity
python
## Code Before: from __future__ import print_function, division import numpy as np import matplotlib.pylab as plt import seaborn as sns from multidensity import MultiDensity from skewstudent import SkewStudent def estimate_bivariate_mle(): ndim = 2 size = (1000, ndim) data = np.random.normal(size=size) eta, lam = 4, -.9 skst = SkewStudent(eta=eta, lam=lam) data = skst.rvs(size=size) out = MultiDensity.fit_mle(data=data) print(out) mdens = MultiDensity() mdens.from_theta(out.x) fig, axes = plt.subplots(nrows=size[1], ncols=1) for innov, ax in zip(data.T, axes): sns.kdeplot(innov, ax=ax) lines = [ax.get_lines()[0].get_xdata() for ax in axes] lines = np.vstack(lines).T marginals = mdens.marginals(lines) for line, margin, ax in zip(lines.T, marginals.T, axes): ax.plot(line, margin) plt.show() if __name__ == '__main__': estimate_bivariate_mle() ## Instruction: Add plot legend in the example ## Code After: from __future__ import print_function, division import numpy as np import matplotlib.pylab as plt import seaborn as sns from multidensity import MultiDensity from skewstudent import SkewStudent def estimate_bivariate_mle(): ndim = 2 size = (1000, ndim) data = np.random.normal(size=size) eta, lam = 4, -.9 skst = SkewStudent(eta=eta, lam=lam) data = skst.rvs(size=size) out = MultiDensity.fit_mle(data=data) print(out) mdens = MultiDensity() mdens.from_theta(out.x) fig, axes = plt.subplots(nrows=size[1], ncols=1) for innov, ax in zip(data.T, axes): sns.kdeplot(innov, ax=ax, label='data') lines = [ax.get_lines()[0].get_xdata() for ax in axes] lines = np.vstack(lines).T marginals = mdens.marginals(lines) for line, margin, ax in zip(lines.T, marginals.T, axes): ax.plot(line, margin, label='fitted') ax.legend() plt.show() if __name__ == '__main__': estimate_bivariate_mle()
... fig, axes = plt.subplots(nrows=size[1], ncols=1) for innov, ax in zip(data.T, axes): sns.kdeplot(innov, ax=ax, label='data') lines = [ax.get_lines()[0].get_xdata() for ax in axes] lines = np.vstack(lines).T ... marginals = mdens.marginals(lines) for line, margin, ax in zip(lines.T, marginals.T, axes): ax.plot(line, margin, label='fitted') ax.legend() plt.show() ...
b6e3b99a3283d15a1e2e3c72f8a4f0bb8e9a3d2a
bits/src/main/java/dk/aau/kah/bits/evaluation/ExperimentFactory.java
bits/src/main/java/dk/aau/kah/bits/evaluation/ExperimentFactory.java
package dk.aau.kah.bits.evaluation; import da.aau.kah.bits.exceptions.InvalidDatabaseConfig; import dk.aau.kah.bits.database.DatabaseConfig; import dk.aau.kah.bits.database.DatabaseHandler; public class ExperimentFactory { public ExperimentFactory() { } public AbstractEvaluationHandler makeEvaluation(DatabaseHandler databaseHandle, DatabaseConfig databaseConfig) throws InvalidDatabaseConfig { String datasetType = databaseConfig.getDatasetType(); AbstractEvaluationHandler evaluationhandler; if (datasetType.equals("TPC-H")) { evaluationhandler = new EvaluationHandlerTPCH(databaseHandle); } else { throw new InvalidDatabaseConfig("The Dataset Type "+databaseConfig.getDatasetType()+" is not known, implementation is missing."); } return evaluationhandler; } }
package dk.aau.kah.bits.evaluation; import da.aau.kah.bits.exceptions.InvalidDatabaseConfig; import dk.aau.kah.bits.database.DatabaseConfig; import dk.aau.kah.bits.database.DatabaseHandler; public class ExperimentFactory { public ExperimentFactory() { } public AbstractExperimentHandler makeEvaluation(DatabaseHandler databaseHandle, DatabaseConfig databaseConfig) throws InvalidDatabaseConfig { String datasetType = databaseConfig.getDatasetType(); AbstractExperimentHandler evaluationhandler; if (datasetType.equals("TPC-H")) { evaluationhandler = new ExperimentHandlerTPCH(databaseHandle); } else { throw new InvalidDatabaseConfig("The Dataset Type "+databaseConfig.getDatasetType()+" is not known, implementation is missing."); } return evaluationhandler; } }
Rename some functions to experiment instead of evaluation
Rename some functions to experiment instead of evaluation
Java
apache-2.0
kimajakobsen/BITS,kimajakobsen/BITS
java
## Code Before: package dk.aau.kah.bits.evaluation; import da.aau.kah.bits.exceptions.InvalidDatabaseConfig; import dk.aau.kah.bits.database.DatabaseConfig; import dk.aau.kah.bits.database.DatabaseHandler; public class ExperimentFactory { public ExperimentFactory() { } public AbstractEvaluationHandler makeEvaluation(DatabaseHandler databaseHandle, DatabaseConfig databaseConfig) throws InvalidDatabaseConfig { String datasetType = databaseConfig.getDatasetType(); AbstractEvaluationHandler evaluationhandler; if (datasetType.equals("TPC-H")) { evaluationhandler = new EvaluationHandlerTPCH(databaseHandle); } else { throw new InvalidDatabaseConfig("The Dataset Type "+databaseConfig.getDatasetType()+" is not known, implementation is missing."); } return evaluationhandler; } } ## Instruction: Rename some functions to experiment instead of evaluation ## Code After: package dk.aau.kah.bits.evaluation; import da.aau.kah.bits.exceptions.InvalidDatabaseConfig; import dk.aau.kah.bits.database.DatabaseConfig; import dk.aau.kah.bits.database.DatabaseHandler; public class ExperimentFactory { public ExperimentFactory() { } public AbstractExperimentHandler makeEvaluation(DatabaseHandler databaseHandle, DatabaseConfig databaseConfig) throws InvalidDatabaseConfig { String datasetType = databaseConfig.getDatasetType(); AbstractExperimentHandler evaluationhandler; if (datasetType.equals("TPC-H")) { evaluationhandler = new ExperimentHandlerTPCH(databaseHandle); } else { throw new InvalidDatabaseConfig("The Dataset Type "+databaseConfig.getDatasetType()+" is not known, implementation is missing."); } return evaluationhandler; } }
# ... existing code ... } public AbstractExperimentHandler makeEvaluation(DatabaseHandler databaseHandle, DatabaseConfig databaseConfig) throws InvalidDatabaseConfig { String datasetType = databaseConfig.getDatasetType(); AbstractExperimentHandler evaluationhandler; if (datasetType.equals("TPC-H")) { evaluationhandler = new ExperimentHandlerTPCH(databaseHandle); } else { throw new InvalidDatabaseConfig("The Dataset Type "+databaseConfig.getDatasetType()+" is not known, implementation is missing."); } # ... rest of the code ...
85cf0af73ddfdce0281a112e4e86d1104e0222e1
appengine_config.py
appengine_config.py
import os import site import sys approot = os.path.dirname(__file__) sys.path.append(os.path.join(approot, 'lib')) site.addsitedir(os.path.join(approot, 'site-packages'))
import os import site import sys approot = os.path.dirname(__file__) sys.path.append(os.path.join(approot, 'lib')) site.addsitedir(os.path.join(approot, 'site-packages')) def webapp_add_wsgi_middleware(app): from google.appengine.ext.appstats import recording app = recording.appstats_wsgi_middleware(app) return app
Add the appstats recording middleware
Add the appstats recording middleware
Python
mit
xchewtoyx/pulldb
python
## Code Before: import os import site import sys approot = os.path.dirname(__file__) sys.path.append(os.path.join(approot, 'lib')) site.addsitedir(os.path.join(approot, 'site-packages')) ## Instruction: Add the appstats recording middleware ## Code After: import os import site import sys approot = os.path.dirname(__file__) sys.path.append(os.path.join(approot, 'lib')) site.addsitedir(os.path.join(approot, 'site-packages')) def webapp_add_wsgi_middleware(app): from google.appengine.ext.appstats import recording app = recording.appstats_wsgi_middleware(app) return app
// ... existing code ... approot = os.path.dirname(__file__) sys.path.append(os.path.join(approot, 'lib')) site.addsitedir(os.path.join(approot, 'site-packages')) def webapp_add_wsgi_middleware(app): from google.appengine.ext.appstats import recording app = recording.appstats_wsgi_middleware(app) return app // ... rest of the code ...
ad069a50ec7a4b4e6b1dac679e071279e128c824
been/source/markdowndirectory.py
been/source/markdowndirectory.py
from been.core import DirectorySource, source_registry from hashlib import sha1 import re import unicodedata import time import markdown # slugify from Django source (BSD license) def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): md = markdown.Markdown(extensions=['meta']) event['content'] = md.convert(event['raw']) event['title'] = ' '.join(md.Meta.get('title', [event['filename']])) event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])])) event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]])) if md.Meta.get('published'): # Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT) event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S'))) event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest() if time.gmtime() < event['timestamp']: return None else: return event source_registry.add(MarkdownDirectory)
from been.core import DirectorySource, source_registry from hashlib import sha1 import re import unicodedata import time import markdown # slugify from Django source (BSD license) def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): md = markdown.Markdown(extensions=['meta']) event['content'] = md.convert(event['raw']) event['title'] = ' '.join(md.Meta.get('title', [event['filename']])) event['author'] = ' '.join(md.Meta.get('author', [''])) event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])])) event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]])) if md.Meta.get('published'): # Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT) event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S'))) event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest() if time.gmtime() < event['timestamp']: return None else: return event source_registry.add(MarkdownDirectory)
Allow MarkdownDirectory events to specify author.
Allow MarkdownDirectory events to specify author.
Python
bsd-3-clause
chromakode/been
python
## Code Before: from been.core import DirectorySource, source_registry from hashlib import sha1 import re import unicodedata import time import markdown # slugify from Django source (BSD license) def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): md = markdown.Markdown(extensions=['meta']) event['content'] = md.convert(event['raw']) event['title'] = ' '.join(md.Meta.get('title', [event['filename']])) event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])])) event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]])) if md.Meta.get('published'): # Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT) event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S'))) event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest() if time.gmtime() < event['timestamp']: return None else: return event source_registry.add(MarkdownDirectory) ## Instruction: Allow MarkdownDirectory events to specify author. ## Code After: from been.core import DirectorySource, source_registry from hashlib import sha1 import re import unicodedata import time import markdown # slugify from Django source (BSD license) def slugify(value): value = unicodedata.normalize('NFKD', unicode(value)).encode('ascii', 'ignore') value = unicode(re.sub('[^\w\s-]', '', value).strip().lower()) return re.sub('[-\s]+', '-', value) class MarkdownDirectory(DirectorySource): kind = 'markdown' def process_event(self, event): md = markdown.Markdown(extensions=['meta']) event['content'] = md.convert(event['raw']) event['title'] = ' '.join(md.Meta.get('title', [event['filename']])) event['author'] = ' '.join(md.Meta.get('author', [''])) event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])])) event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]])) if md.Meta.get('published'): # Parse time, then convert struct_time (local) -> epoch (GMT) -> struct_time (GMT) event['timestamp'] = time.gmtime(time.mktime(time.strptime(' '.join(md.Meta.get('published')), '%Y-%m-%d %H:%M:%S'))) event['_id'] = sha1(event['full_path'].encode('utf-8')).hexdigest() if time.gmtime() < event['timestamp']: return None else: return event source_registry.add(MarkdownDirectory)
// ... existing code ... md = markdown.Markdown(extensions=['meta']) event['content'] = md.convert(event['raw']) event['title'] = ' '.join(md.Meta.get('title', [event['filename']])) event['author'] = ' '.join(md.Meta.get('author', [''])) event['slug'] = '-'.join(md.Meta.get('slug', [slugify(event['title'])])) event['summary'] = ' '.join(md.Meta.get('summary', [event['raw'][:100]])) if md.Meta.get('published'): // ... rest of the code ...
e98b4f2a343643c513d8cd4cf8b34a446322b0de
watson/common/exception.py
watson/common/exception.py
"""Watson's base exception handling."""
"""Watson's base exception handling.""" class WatsonException(Exception): """Base watson exception To correctly use this class, inherit from it and define a `template` property. That `template` will be formated using the keyword arguments provided to the constructor. Example: :: class NotFound(WatsonException): '''The required object is not available in container.''' template = "The %(object)r was not found in %(container)s." raise NotFound(object=object_name, container=container) """ template = "An unknown exception occurred." def __init__(self, message=None, **kwargs): message = message or self.template try: message = message % kwargs except (TypeError, KeyError): # Something went wrong during message formatting. # Probably kwargs doesn't match a variable in the message. message = ("Message: %(template)s. Extra or " "missing info: %(kwargs)s" % {"template": message, "kwargs": kwargs}) super(WatsonException, self).__init__(message)
Add base exception for Watson project
Add base exception for Watson project
Python
mit
alexandrucoman/watson,c-square/watson,c-square/evorepo-common
python
## Code Before: """Watson's base exception handling.""" ## Instruction: Add base exeption for Watson project ## Code After: """Watson's base exception handling.""" class WatsonException(Exception): """Base watson exception To correctly use this class, inherit from it and define a `template` property. That `template` will be formated using the keyword arguments provided to the constructor. Example: :: class NotFound(WatsonException): '''The required object is not available in container.''' template = "The %(object)r was not found in %(container)s." raise NotFound(object=object_name, container=container) """ template = "An unknown exception occurred." def __init__(self, message=None, **kwargs): message = message or self.template try: message = message % kwargs except (TypeError, KeyError): # Something went wrong during message formatting. # Probably kwargs doesn't match a variable in the message. message = ("Message: %(template)s. Extra or " "missing info: %(kwargs)s" % {"template": message, "kwargs": kwargs}) super(WatsonException, self).__init__(message)
# ... existing code ... """Watson's base exception handling.""" class WatsonException(Exception): """Base watson exception To correctly use this class, inherit from it and define a `template` property. That `template` will be formated using the keyword arguments provided to the constructor. Example: :: class NotFound(WatsonException): '''The required object is not available in container.''' template = "The %(object)r was not found in %(container)s." raise NotFound(object=object_name, container=container) """ template = "An unknown exception occurred." def __init__(self, message=None, **kwargs): message = message or self.template try: message = message % kwargs except (TypeError, KeyError): # Something went wrong during message formatting. # Probably kwargs doesn't match a variable in the message. message = ("Message: %(template)s. Extra or " "missing info: %(kwargs)s" % {"template": message, "kwargs": kwargs}) super(WatsonException, self).__init__(message) # ... rest of the code ...
5e3b29d7348380e53b69be19af3c90475f61683e
api/src/main/java/org/vivoweb/webapp/startup/SiteAdminSetup.java
api/src/main/java/org/vivoweb/webapp/startup/SiteAdminSetup.java
package org.vivoweb.webapp.startup; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.InstitutionalInternalClassController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.SiteAdminController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.MenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.VIVOMenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.visualization.tools.ToolsRequestHandler; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; public class SiteAdminSetup implements ServletContextListener { @Override public void contextInitialized(ServletContextEvent servletContextEvent) { SiteAdminController.registerSiteMaintenanceUrl("rebuildVisCache", UrlBuilder.getUrl("/vis/tools"), ToolsRequestHandler.REQUIRED_ACTIONS); SiteAdminController.registerSiteConfigData("internalClass", UrlBuilder.getUrl("/processInstitutionalInternalClass"), InstitutionalInternalClassController.REQUIRED_ACTIONS); } @Override public void contextDestroyed(ServletContextEvent servletContextEvent) { } }
package org.vivoweb.webapp.startup; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.InstitutionalInternalClassController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.SiteAdminController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.MenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.VIVOMenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.visualization.tools.ToolsRequestHandler; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; public class SiteAdminSetup implements ServletContextListener { @Override public void contextInitialized(ServletContextEvent servletContextEvent) { SiteAdminController.registerSiteMaintenanceUrl("rebuildVisCache", "/vis/tools", null, ToolsRequestHandler.REQUIRED_ACTIONS); SiteAdminController.registerSiteConfigData("internalClass", "/processInstitutionalInternalClass", null, InstitutionalInternalClassController.REQUIRED_ACTIONS); } @Override public void contextDestroyed(ServletContextEvent servletContextEvent) { } }
Fix broken site admin links
Fix broken site admin links
Java
bsd-3-clause
vivo-project/VIVO,vivo-project/VIVO,vivo-project/VIVO,vivo-project/VIVO
java
## Code Before: package org.vivoweb.webapp.startup; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.InstitutionalInternalClassController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.SiteAdminController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.MenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.VIVOMenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.visualization.tools.ToolsRequestHandler; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; public class SiteAdminSetup implements ServletContextListener { @Override public void contextInitialized(ServletContextEvent servletContextEvent) { SiteAdminController.registerSiteMaintenanceUrl("rebuildVisCache", UrlBuilder.getUrl("/vis/tools"), ToolsRequestHandler.REQUIRED_ACTIONS); SiteAdminController.registerSiteConfigData("internalClass", UrlBuilder.getUrl("/processInstitutionalInternalClass"), InstitutionalInternalClassController.REQUIRED_ACTIONS); } @Override public void contextDestroyed(ServletContextEvent servletContextEvent) { } } ## Instruction: Fix broken site admin links ## Code After: package org.vivoweb.webapp.startup; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.InstitutionalInternalClassController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.SiteAdminController; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.MenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.utils.menuManagement.VIVOMenuManagementDataUtils; import edu.cornell.mannlib.vitro.webapp.visualization.tools.ToolsRequestHandler; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; public class SiteAdminSetup implements ServletContextListener { @Override public void contextInitialized(ServletContextEvent servletContextEvent) { SiteAdminController.registerSiteMaintenanceUrl("rebuildVisCache", "/vis/tools", null, ToolsRequestHandler.REQUIRED_ACTIONS); SiteAdminController.registerSiteConfigData("internalClass", "/processInstitutionalInternalClass", null, InstitutionalInternalClassController.REQUIRED_ACTIONS); } @Override public void contextDestroyed(ServletContextEvent servletContextEvent) { } }
# ... existing code ... public class SiteAdminSetup implements ServletContextListener { @Override public void contextInitialized(ServletContextEvent servletContextEvent) { SiteAdminController.registerSiteMaintenanceUrl("rebuildVisCache", "/vis/tools", null, ToolsRequestHandler.REQUIRED_ACTIONS); SiteAdminController.registerSiteConfigData("internalClass", "/processInstitutionalInternalClass", null, InstitutionalInternalClassController.REQUIRED_ACTIONS); } @Override # ... rest of the code ...
573386b862abd6b6d118805c92425ce9a9f908b8
d02/d02s04/src/main/java/com/oce/springboot/training/d02/s04/repository/ProductRepository.java
d02/d02s04/src/main/java/com/oce/springboot/training/d02/s04/repository/ProductRepository.java
package com.oce.springboot.training.d02.s04.repository; import com.oce.springboot.training.d02.s04.model.Product; import org.springframework.stereotype.Repository; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.List; @Repository public class ProductRepository { // an in-memory list of products private List<Product> products = new ArrayList<>(1); @PostConstruct public void init() { products.add(getDefaultProduct()); } @SuppressWarnings("unused") public Product get(int id) { return getDefaultProduct(); } public List<Product> getAll() { return products; } public void create(final Product product) { products.add(product); } public void update(final int id, final Product product) { final Product currentProduct = products.get(id < products.size() ? id : 0); currentProduct.setName(product.getName()); } public void delete(final int id) { products.remove(id < products.size() ? id : 0); } private Product getDefaultProduct() { final Product product = new Product(); product.setId(24); product.setName("Dell XPS 9360"); return product; } }
package com.oce.springboot.training.d02.s04.repository; import com.oce.springboot.training.d02.s04.model.Product; import org.springframework.stereotype.Repository; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.List; @Repository public class ProductRepository { // an in-memory list of products private List<Product> products = new ArrayList<>(1); @PostConstruct public void init() { products.add(getDefaultProduct()); } @SuppressWarnings("unused") public Product get(int id) { return id < products.size() ? products.get(id) : null; } public List<Product> getAll() { return products; } public void create(final Product product) { products.add(product); } public void update(final int id, final Product product) { final Product currentProduct = products.get(id < products.size() ? id : 0); currentProduct.setName(product.getName()); } public void delete(final int id) { products.remove(id < products.size() ? id : 0); } private Product getDefaultProduct() { final Product product = new Product(); product.setId(24); product.setName("Dell XPS 9360"); return product; } }
Return a null product if there isn't a saved one
[improve] Return a null product if there isn't a saved one
Java
apache-2.0
bogdansolga/nokia-spring-boot-training,bogdansolga/nokia-spring-boot-training,bogdansolga/oce-spring-boot-training,bogdansolga/oce-spring-boot-training
java
## Code Before: package com.oce.springboot.training.d02.s04.repository; import com.oce.springboot.training.d02.s04.model.Product; import org.springframework.stereotype.Repository; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.List; @Repository public class ProductRepository { // an in-memory list of products private List<Product> products = new ArrayList<>(1); @PostConstruct public void init() { products.add(getDefaultProduct()); } @SuppressWarnings("unused") public Product get(int id) { return getDefaultProduct(); } public List<Product> getAll() { return products; } public void create(final Product product) { products.add(product); } public void update(final int id, final Product product) { final Product currentProduct = products.get(id < products.size() ? id : 0); currentProduct.setName(product.getName()); } public void delete(final int id) { products.remove(id < products.size() ? id : 0); } private Product getDefaultProduct() { final Product product = new Product(); product.setId(24); product.setName("Dell XPS 9360"); return product; } } ## Instruction: [improve] Return a null product if there isn't a saved one ## Code After: package com.oce.springboot.training.d02.s04.repository; import com.oce.springboot.training.d02.s04.model.Product; import org.springframework.stereotype.Repository; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.List; @Repository public class ProductRepository { // an in-memory list of products private List<Product> products = new ArrayList<>(1); @PostConstruct public void init() { products.add(getDefaultProduct()); } @SuppressWarnings("unused") public Product get(int id) { return id < products.size() ? products.get(id) : null; } public List<Product> getAll() { return products; } public void create(final Product product) { products.add(product); } public void update(final int id, final Product product) { final Product currentProduct = products.get(id < products.size() ? id : 0); currentProduct.setName(product.getName()); } public void delete(final int id) { products.remove(id < products.size() ? id : 0); } private Product getDefaultProduct() { final Product product = new Product(); product.setId(24); product.setName("Dell XPS 9360"); return product; } }
... @SuppressWarnings("unused") public Product get(int id) { return id < products.size() ? products.get(id) : null; } public List<Product> getAll() { ...
ed16ce94936816566eafad1075d5a7425f7a563e
test/SyscallsMock.h
test/SyscallsMock.h
namespace evil { class SyscallsMock { private: static thread_local std::stack<SyscallsMock *> _instance_stack; public: SyscallsMock() { _instance_stack.push(this); } ~SyscallsMock() { _instance_stack.pop(); } static SyscallsMock *instance() { return _instance_stack.top(); } MOCK_METHOD2(_access, int(const char *path, int mode)); MOCK_METHOD1(_close, int(int fd)); MOCK_METHOD1(_isatty, int(int fd)); MOCK_METHOD2(_kill, int(long pid, int sig)); MOCK_METHOD3(_open, int(const char *path, int flags, int mode)); MOCK_METHOD0(_getpid, long(void)); MOCK_METHOD3(_write, ssize_t(int fd, const void *buf, size_t count)); MOCK_METHOD1(_sbrk, void *(ptrdiff_t increment)); MOCK_METHOD1(_exit, void(int exit_code)); }; } // namespace evil #endif // __EVIL_TEST_SYSCALLS_MOCK_H
namespace evil { class SyscallsMock { private: static thread_local std::stack<SyscallsMock *> _instance_stack; public: SyscallsMock() { _instance_stack.push(this); } ~SyscallsMock() { _instance_stack.pop(); } static SyscallsMock *instance() { return _instance_stack.top(); } MOCK_METHOD2(_access, int(const std::string &path, int mode)); MOCK_METHOD1(_close, int(int fd)); MOCK_METHOD1(_isatty, int(int fd)); MOCK_METHOD2(_kill, int(long pid, int sig)); MOCK_METHOD3(_open, int(const std::string &path, int flags, int mode)); MOCK_METHOD0(_getpid, long(void)); MOCK_METHOD3(_write, ssize_t(int fd, const void *buf, size_t count)); MOCK_METHOD1(_sbrk, void *(ptrdiff_t increment)); MOCK_METHOD1(_exit, void(int exit_code)); }; } // namespace evil #endif // __EVIL_TEST_SYSCALLS_MOCK_H
Make mocked functions take std::string to allow == compare
Make mocked functions take std::string to allow == compare
C
mit
dextero/evilibc,dextero/evilibc,dextero/evilibc
c
## Code Before: namespace evil { class SyscallsMock { private: static thread_local std::stack<SyscallsMock *> _instance_stack; public: SyscallsMock() { _instance_stack.push(this); } ~SyscallsMock() { _instance_stack.pop(); } static SyscallsMock *instance() { return _instance_stack.top(); } MOCK_METHOD2(_access, int(const char *path, int mode)); MOCK_METHOD1(_close, int(int fd)); MOCK_METHOD1(_isatty, int(int fd)); MOCK_METHOD2(_kill, int(long pid, int sig)); MOCK_METHOD3(_open, int(const char *path, int flags, int mode)); MOCK_METHOD0(_getpid, long(void)); MOCK_METHOD3(_write, ssize_t(int fd, const void *buf, size_t count)); MOCK_METHOD1(_sbrk, void *(ptrdiff_t increment)); MOCK_METHOD1(_exit, void(int exit_code)); }; } // namespace evil #endif // __EVIL_TEST_SYSCALLS_MOCK_H ## Instruction: Make mocked functions take std::string to allow == compare ## Code After: namespace evil { class SyscallsMock { private: static thread_local std::stack<SyscallsMock *> _instance_stack; public: SyscallsMock() { _instance_stack.push(this); } ~SyscallsMock() { _instance_stack.pop(); } static SyscallsMock *instance() { return _instance_stack.top(); } MOCK_METHOD2(_access, int(const std::string &path, int mode)); MOCK_METHOD1(_close, int(int fd)); MOCK_METHOD1(_isatty, int(int fd)); MOCK_METHOD2(_kill, int(long pid, int sig)); MOCK_METHOD3(_open, int(const std::string &path, int flags, int mode)); MOCK_METHOD0(_getpid, long(void)); MOCK_METHOD3(_write, ssize_t(int fd, const void *buf, size_t count)); MOCK_METHOD1(_sbrk, void *(ptrdiff_t increment)); MOCK_METHOD1(_exit, void(int exit_code)); }; } // namespace evil #endif // __EVIL_TEST_SYSCALLS_MOCK_H
// ... existing code ... static SyscallsMock *instance() { return _instance_stack.top(); } MOCK_METHOD2(_access, int(const std::string &path, int mode)); MOCK_METHOD1(_close, int(int fd)); MOCK_METHOD1(_isatty, int(int fd)); MOCK_METHOD2(_kill, int(long pid, int sig)); MOCK_METHOD3(_open, int(const std::string &path, int flags, int mode)); MOCK_METHOD0(_getpid, long(void)); MOCK_METHOD3(_write, ssize_t(int fd, const void *buf, size_t count)); MOCK_METHOD1(_sbrk, void *(ptrdiff_t increment)); // ... rest of the code ...
5ae5c27f69cdfb1c53ada0a2aa90d76c4d3ce421
memcached.py
memcached.py
import telnetlib import sys import socket import time def main(host='127.0.0.1', port='11211'): hostname = socket.gethostname().replace('.', '_') ts = str(int(time.time())) template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts for line in command(host, port, 'stats').splitlines(): if line.startswith('STAT '): header, key, value = line.split() if key.replace('_', '').isalpha() and is_float(value): print(template.format(hostname, key, value)) def command(host, port, cmd): """Write a command to telnet and return the response""" client = telnetlib.Telnet(host, port) client.write(cmd + '\n') return client.read_until('END') def is_float(value): try: float(value) except ValueError: return False else: return True if __name__ == '__main__': main(*sys.argv[1:])
import telnetlib import sys import socket import time import re def main(host='127.0.0.1', port='11211'): hostname = socket.gethostname().replace('.', '_') ts = str(int(time.time())) template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts pattern = re.compile('STAT \w+ \d+(.\d+)?$') for line in command(host, port, 'stats').splitlines(): if pattern.match(line): header, key, value = line.split() print(template.format(hostname, key, value)) def command(host, port, cmd): """Write a command to telnet and return the response""" client = telnetlib.Telnet(host, port) client.write(cmd + '\n') return client.read_until('END') def is_float(value): try: float(value) except ValueError: return False else: return True if __name__ == '__main__': main(*sys.argv[1:])
Use regexp for checking the line
Use regexp for checking the line
Python
mit
innogames/igcollect
python
## Code Before: import telnetlib import sys import socket import time def main(host='127.0.0.1', port='11211'): hostname = socket.gethostname().replace('.', '_') ts = str(int(time.time())) template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts for line in command(host, port, 'stats').splitlines(): if line.startswith('STAT '): header, key, value = line.split() if key.replace('_', '').isalpha() and is_float(value): print(template.format(hostname, key, value)) def command(host, port, cmd): """Write a command to telnet and return the response""" client = telnetlib.Telnet(host, port) client.write(cmd + '\n') return client.read_until('END') def is_float(value): try: float(value) except ValueError: return False else: return True if __name__ == '__main__': main(*sys.argv[1:]) ## Instruction: Use regexp for checking the line ## Code After: import telnetlib import sys import socket import time import re def main(host='127.0.0.1', port='11211'): hostname = socket.gethostname().replace('.', '_') ts = str(int(time.time())) template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts pattern = re.compile('STAT \w+ \d+(.\d+)?$') for line in command(host, port, 'stats').splitlines(): if pattern.match(line): header, key, value = line.split() print(template.format(hostname, key, value)) def command(host, port, cmd): """Write a command to telnet and return the response""" client = telnetlib.Telnet(host, port) client.write(cmd + '\n') return client.read_until('END') def is_float(value): try: float(value) except ValueError: return False else: return True if __name__ == '__main__': main(*sys.argv[1:])
... import sys import socket import time import re def main(host='127.0.0.1', port='11211'): ... hostname = socket.gethostname().replace('.', '_') ts = str(int(time.time())) template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts pattern = re.compile('STAT \w+ \d+(.\d+)?$') for line in command(host, port, 'stats').splitlines(): if pattern.match(line): header, key, value = line.split() print(template.format(hostname, key, value)) def command(host, port, cmd): ...
d150db290a72590e0f7cf9dae485bf98901bb2c2
web_ui/helpers.py
web_ui/helpers.py
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted")
Add helper method for resetting user data
Add helper method for resetting user data
Python
apache-2.0
ciex/souma,ciex/souma,ciex/souma
python
## Code Before: from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] ## Instruction: Add helper method for resetting user data ## Code After: from web_ui import app from flask import session from datetime import datetime # For calculating scores epoch = datetime.utcfromtimestamp(0) epoch_seconds = lambda dt: (dt - epoch).total_seconds() - 1356048000 def score(star_object): import random return random.random() * 100 - random.random() * 10 def get_active_persona(): from nucleus.models import Persona """ Return the currently active persona or 0 if there is no controlled persona. """ if 'active_persona' not in session or session['active_persona'] is None: """Activate first Persona with a private key""" controlled_persona = Persona.query.filter('sign_private != ""').first() if controlled_persona is None: return "" else: session['active_persona'] = controlled_persona.id return session['active_persona'] def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted")
# ... existing code ... def allowed_file(filename): return '.' in filename and \ filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS'] def reset_userdata(): """Reset all userdata files""" import os for fileid in ["DATABASE", "SECRET_KEY_FILE", "PASSWORD_HASH_FILE"]: try: os.remove(app.config[fileid]) except OSError: app.logger.warning("RESET: {} not found".format(fileid)) else: app.logger.warning("RESET: {} deleted") # ... rest of the code ...
55cfa656096e17cb624643b292d479752eff9b84
myrrh/python/registry.h
myrrh/python/registry.h
/** @file Copyright John Reid 2013 */ #ifndef MYRRH_PYTHON_REGISTRY_H_ #define MYRRH_PYTHON_REGISTRY_H_ #ifdef _MSC_VER # pragma once #endif //_MSC_VER namespace myrrh { namespace python { /** * Queries if the the type specified is in the registry. */ template< typename QueryT > bool in_registry() { namespace bp = boost::python; bp::type_info info = boost::python::type_id< QueryT >(); bp::converter::registration * registration = boost::python::converter::registry::query( info ); return registration != 0; } } //namespace myrrh } //namespace python #endif //MYRRH_PYTHON_REGISTRY_H_
/** @file Copyright John Reid 2013 */ #ifndef MYRRH_PYTHON_REGISTRY_H_ #define MYRRH_PYTHON_REGISTRY_H_ #ifdef _MSC_VER # pragma once #endif //_MSC_VER namespace myrrh { namespace python { /** * Queries if the the type specified is in the registry. This can be used to avoid registering * the same type more than once. */ template< typename QueryT > const boost::python::converter::registration * get_registration() { namespace bp = boost::python; const bp::type_info info = boost::python::type_id< QueryT >(); const bp::converter::registration * registration = boost::python::converter::registry::query( info ); // need to check for m_to_python converter: see http://stackoverflow.com/a/13017303/959926 if( registration != 0 && registration->m_to_python != 0 ) { return registration; } else { return 0; } } } //namespace myrrh } //namespace python #endif //MYRRH_PYTHON_REGISTRY_H_
Fix for inner class registration check
Fix for inner class registration check
C
mit
JohnReid/myrrh,JohnReid/myrrh
c
## Code Before: /** @file Copyright John Reid 2013 */ #ifndef MYRRH_PYTHON_REGISTRY_H_ #define MYRRH_PYTHON_REGISTRY_H_ #ifdef _MSC_VER # pragma once #endif //_MSC_VER namespace myrrh { namespace python { /** * Queries if the the type specified is in the registry. */ template< typename QueryT > bool in_registry() { namespace bp = boost::python; bp::type_info info = boost::python::type_id< QueryT >(); bp::converter::registration * registration = boost::python::converter::registry::query( info ); return registration != 0; } } //namespace myrrh } //namespace python #endif //MYRRH_PYTHON_REGISTRY_H_ ## Instruction: Fix for inner class registration check ## Code After: /** @file Copyright John Reid 2013 */ #ifndef MYRRH_PYTHON_REGISTRY_H_ #define MYRRH_PYTHON_REGISTRY_H_ #ifdef _MSC_VER # pragma once #endif //_MSC_VER namespace myrrh { namespace python { /** * Queries if the the type specified is in the registry. This can be used to avoid registering * the same type more than once. */ template< typename QueryT > const boost::python::converter::registration * get_registration() { namespace bp = boost::python; const bp::type_info info = boost::python::type_id< QueryT >(); const bp::converter::registration * registration = boost::python::converter::registry::query( info ); // need to check for m_to_python converter: see http://stackoverflow.com/a/13017303/959926 if( registration != 0 && registration->m_to_python != 0 ) { return registration; } else { return 0; } } } //namespace myrrh } //namespace python #endif //MYRRH_PYTHON_REGISTRY_H_
# ... existing code ... namespace python { /** * Queries if the the type specified is in the registry. This can be used to avoid registering * the same type more than once. */ template< typename QueryT > const boost::python::converter::registration * get_registration() { namespace bp = boost::python; const bp::type_info info = boost::python::type_id< QueryT >(); const bp::converter::registration * registration = boost::python::converter::registry::query( info ); // need to check for m_to_python converter: see http://stackoverflow.com/a/13017303/959926 if( registration != 0 && registration->m_to_python != 0 ) { return registration; } else { return 0; } } } //namespace myrrh # ... rest of the code ...
21a451d7f90a7953e0eb64101188b136d076ad23
ouzel/RenderTarget.h
ouzel/RenderTarget.h
// Copyright (C) 2016 Elviss Strazdins // This file is part of the Ouzel engine. #pragma once #include <algorithm> #include <memory> #include "Types.h" #include "Noncopyable.h" #include "Size2.h" namespace ouzel { namespace graphics { class Renderer; class Texture; class RenderTarget: public Noncopyable { friend Renderer; public: virtual ~RenderTarget(); virtual bool init(const Size2& newSize, bool useDepthBuffer); TexturePtr getTexture() const { return texture; } protected: RenderTarget(); Size2 size; bool depthBuffer = false; TexturePtr texture; }; } // namespace graphics } // namespace ouzel
// Copyright (C) 2016 Elviss Strazdins // This file is part of the Ouzel engine. #pragma once #include <algorithm> #include <memory> #include "Types.h" #include "Noncopyable.h" #include "Size2.h" #include "Color.h" namespace ouzel { namespace graphics { class Renderer; class Texture; class RenderTarget: public Noncopyable { friend Renderer; public: virtual ~RenderTarget(); virtual bool init(const Size2& newSize, bool useDepthBuffer); virtual void setClearColor(Color color) { clearColor = color; } virtual Color getClearColor() const { return clearColor; } TexturePtr getTexture() const { return texture; } protected: RenderTarget(); Size2 size; bool depthBuffer = false; Color clearColor; TexturePtr texture; }; } // namespace graphics } // namespace ouzel
Add clear color to render target
Add clear color to render target
C
unlicense
Hotspotmar/ouzel,elnormous/ouzel,elnormous/ouzel,elvman/ouzel,elvman/ouzel,elnormous/ouzel,Hotspotmar/ouzel,Hotspotmar/ouzel
c
## Code Before: // Copyright (C) 2016 Elviss Strazdins // This file is part of the Ouzel engine. #pragma once #include <algorithm> #include <memory> #include "Types.h" #include "Noncopyable.h" #include "Size2.h" namespace ouzel { namespace graphics { class Renderer; class Texture; class RenderTarget: public Noncopyable { friend Renderer; public: virtual ~RenderTarget(); virtual bool init(const Size2& newSize, bool useDepthBuffer); TexturePtr getTexture() const { return texture; } protected: RenderTarget(); Size2 size; bool depthBuffer = false; TexturePtr texture; }; } // namespace graphics } // namespace ouzel ## Instruction: Add clear color to render target ## Code After: // Copyright (C) 2016 Elviss Strazdins // This file is part of the Ouzel engine. #pragma once #include <algorithm> #include <memory> #include "Types.h" #include "Noncopyable.h" #include "Size2.h" #include "Color.h" namespace ouzel { namespace graphics { class Renderer; class Texture; class RenderTarget: public Noncopyable { friend Renderer; public: virtual ~RenderTarget(); virtual bool init(const Size2& newSize, bool useDepthBuffer); virtual void setClearColor(Color color) { clearColor = color; } virtual Color getClearColor() const { return clearColor; } TexturePtr getTexture() const { return texture; } protected: RenderTarget(); Size2 size; bool depthBuffer = false; Color clearColor; TexturePtr texture; }; } // namespace graphics } // namespace ouzel
# ... existing code ... #include "Types.h" #include "Noncopyable.h" #include "Size2.h" #include "Color.h" namespace ouzel { # ... modified code ... virtual bool init(const Size2& newSize, bool useDepthBuffer); virtual void setClearColor(Color color) { clearColor = color; } virtual Color getClearColor() const { return clearColor; } TexturePtr getTexture() const { return texture; } protected: ... Size2 size; bool depthBuffer = false; Color clearColor; TexturePtr texture; }; } // namespace graphics # ... rest of the code ...
49d5583175bef0190ccfe7aa2ad5873f41d274ff
src/com/rabenauge/gl/Texture2D.java
src/com/rabenauge/gl/Texture2D.java
package com.rabenauge.gl; import android.graphics.Bitmap; import javax.microedition.khronos.opengles.GL10; import javax.microedition.khronos.opengles.GL11; /* * Wrapper class for 2D texture objects. */ public class Texture2D extends Texture { public Texture2D(GL11 gl) { super(gl, GL10.GL_TEXTURE_2D, GL11.GL_TEXTURE_BINDING_2D); } public void setData(Bitmap bitmap, int level, boolean border) { makeCurrent(); if (!isPOT(bitmap.getWidth()) || !isPOT(bitmap.getHeight())) { bitmap=Bitmap.createScaledBitmap(bitmap, ceilPOT(bitmap.getWidth()), ceilPOT(bitmap.getHeight()), false); } android.opengl.GLUtils.texImage2D(target, level, bitmap, border?1:0); } public void setData(Bitmap bitmap) { setData(bitmap, 0, false); } }
package com.rabenauge.gl; import android.graphics.Bitmap; import javax.microedition.khronos.opengles.GL10; import javax.microedition.khronos.opengles.GL11; /* * Wrapper class for 2D texture objects. */ public class Texture2D extends Texture { public Texture2D(GL11 gl) { super(gl, GL10.GL_TEXTURE_2D, GL11.GL_TEXTURE_BINDING_2D); } public void setData(Bitmap bitmap, int level, boolean border) { int w=bitmap.getWidth(), h=bitmap.getHeight(); int w2=ceilPOT(w), h2=ceilPOT(h); if (w!=w2 || h!=h2) { bitmap=Bitmap.createScaledBitmap(bitmap, w2, h2, true); } makeCurrent(); android.opengl.GLUtils.texImage2D(target, level, bitmap, border?1:0); } public void setData(Bitmap bitmap) { setData(bitmap, 0, false); } }
Enable filtering when scaling NPOT textures; minor optimizations
Enable filtering when scaling NPOT textures; minor optimizations
Java
apache-2.0
sschuberth/parandroid
java
## Code Before: package com.rabenauge.gl; import android.graphics.Bitmap; import javax.microedition.khronos.opengles.GL10; import javax.microedition.khronos.opengles.GL11; /* * Wrapper class for 2D texture objects. */ public class Texture2D extends Texture { public Texture2D(GL11 gl) { super(gl, GL10.GL_TEXTURE_2D, GL11.GL_TEXTURE_BINDING_2D); } public void setData(Bitmap bitmap, int level, boolean border) { makeCurrent(); if (!isPOT(bitmap.getWidth()) || !isPOT(bitmap.getHeight())) { bitmap=Bitmap.createScaledBitmap(bitmap, ceilPOT(bitmap.getWidth()), ceilPOT(bitmap.getHeight()), false); } android.opengl.GLUtils.texImage2D(target, level, bitmap, border?1:0); } public void setData(Bitmap bitmap) { setData(bitmap, 0, false); } } ## Instruction: Enable filtering when scaling NPOT textures; minor optimizations ## Code After: package com.rabenauge.gl; import android.graphics.Bitmap; import javax.microedition.khronos.opengles.GL10; import javax.microedition.khronos.opengles.GL11; /* * Wrapper class for 2D texture objects. */ public class Texture2D extends Texture { public Texture2D(GL11 gl) { super(gl, GL10.GL_TEXTURE_2D, GL11.GL_TEXTURE_BINDING_2D); } public void setData(Bitmap bitmap, int level, boolean border) { int w=bitmap.getWidth(), h=bitmap.getHeight(); int w2=ceilPOT(w), h2=ceilPOT(h); if (w!=w2 || h!=h2) { bitmap=Bitmap.createScaledBitmap(bitmap, w2, h2, true); } makeCurrent(); android.opengl.GLUtils.texImage2D(target, level, bitmap, border?1:0); } public void setData(Bitmap bitmap) { setData(bitmap, 0, false); } }
... } public void setData(Bitmap bitmap, int level, boolean border) { int w=bitmap.getWidth(), h=bitmap.getHeight(); int w2=ceilPOT(w), h2=ceilPOT(h); if (w!=w2 || h!=h2) { bitmap=Bitmap.createScaledBitmap(bitmap, w2, h2, true); } makeCurrent(); android.opengl.GLUtils.texImage2D(target, level, bitmap, border?1:0); } ...
f50d192b9384664ded37326ff48a1351843f76c6
setup.py
setup.py
try: from pip._internal.req import parse_requirements except ImportError: from pip.req import parse_requirements from setuptools import find_packages from setuptools import setup setup( name='jsonapi-requests', version='0.6.1.dev0', description='Python client implementation for json api. http://jsonapi.org/', author='Social WiFi', author_email='[email protected]', url='https://github.com/socialwifi/jsonapi-requests', packages=find_packages(exclude=['tests']), install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)], setup_requires=['pytest-runner'], tests_require=['pytest', 'flask'], extras_require={ 'flask': ['flask'] }, license='BSD', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ] )
try: from pip._internal.req import parse_requirements except ImportError: from pip.req import parse_requirements from setuptools import find_packages from setuptools import setup def get_long_description(): with open('README.md') as readme_file: return readme_file.read() setup( name='jsonapi-requests', version='0.6.1.dev0', description='Python client implementation for json api. http://jsonapi.org/', long_description=get_long_description(), long_description_content_type='text/markdown', author='Social WiFi', author_email='[email protected]', url='https://github.com/socialwifi/jsonapi-requests', packages=find_packages(exclude=['tests']), install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)], setup_requires=['pytest-runner'], tests_require=['pytest', 'flask'], extras_require={ 'flask': ['flask'] }, license='BSD', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ] )
Add long description to package.
Add long description to package.
Python
bsd-3-clause
socialwifi/jsonapi-requests
python
## Code Before: try: from pip._internal.req import parse_requirements except ImportError: from pip.req import parse_requirements from setuptools import find_packages from setuptools import setup setup( name='jsonapi-requests', version='0.6.1.dev0', description='Python client implementation for json api. http://jsonapi.org/', author='Social WiFi', author_email='[email protected]', url='https://github.com/socialwifi/jsonapi-requests', packages=find_packages(exclude=['tests']), install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)], setup_requires=['pytest-runner'], tests_require=['pytest', 'flask'], extras_require={ 'flask': ['flask'] }, license='BSD', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ] ) ## Instruction: Add long description to package. ## Code After: try: from pip._internal.req import parse_requirements except ImportError: from pip.req import parse_requirements from setuptools import find_packages from setuptools import setup def get_long_description(): with open('README.md') as readme_file: return readme_file.read() setup( name='jsonapi-requests', version='0.6.1.dev0', description='Python client implementation for json api. http://jsonapi.org/', long_description=get_long_description(), long_description_content_type='text/markdown', author='Social WiFi', author_email='[email protected]', url='https://github.com/socialwifi/jsonapi-requests', packages=find_packages(exclude=['tests']), install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)], setup_requires=['pytest-runner'], tests_require=['pytest', 'flask'], extras_require={ 'flask': ['flask'] }, license='BSD', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ] )
... from setuptools import setup def get_long_description(): with open('README.md') as readme_file: return readme_file.read() setup( name='jsonapi-requests', version='0.6.1.dev0', description='Python client implementation for json api. http://jsonapi.org/', long_description=get_long_description(), long_description_content_type='text/markdown', author='Social WiFi', author_email='[email protected]', url='https://github.com/socialwifi/jsonapi-requests', ...
909d546a96e48a40b6b6e503be0451666ba305a3
src/main/java/fr/aumgn/dac2/DAC.java
src/main/java/fr/aumgn/dac2/DAC.java
package fr.aumgn.dac2; import fr.aumgn.bukkitutils.localization.PluginResourceBundles; import fr.aumgn.bukkitutils.localization.bundle.PluginResourceBundle; import fr.aumgn.dac2.config.DACConfig; public class DAC { private final DACPlugin plugin; private DACConfig config; private PluginResourceBundle cmdMessages; private PluginResourceBundle messages; public DAC(DACPlugin plugin) { this.plugin = plugin; reloadData(); } public DACPlugin getPlugin() { return plugin; } public DACConfig getConfig() { return config; } public PluginResourceBundle getCmdMessages() { return cmdMessages; } public PluginResourceBundle getMessages() { return messages; } public void reloadData() { config = plugin.reloadDACConfig(); PluginResourceBundles bundles = new PluginResourceBundles(plugin, config.getLocale(), plugin.getDataFolder()); cmdMessages = bundles.get("commands"); messages = bundles.get("messages"); } }
package fr.aumgn.dac2; import fr.aumgn.bukkitutils.localization.PluginResourceBundles; import fr.aumgn.bukkitutils.localization.bundle.PluginResourceBundle; import fr.aumgn.dac2.config.DACConfig; public class DAC { private final DACPlugin plugin; private DACConfig config; private PluginResourceBundle cmdMessages; private PluginResourceBundle messages; public DAC(DACPlugin plugin) { this.plugin = plugin; reloadData(); } public DACPlugin getPlugin() { return plugin; } public DACConfig getConfig() { return config; } public PluginResourceBundle getCmdMessages() { return cmdMessages; } public PluginResourceBundle getMessages() { return messages; } public void reloadData() { config = plugin.reloadDACConfig(); PluginResourceBundles.clearCache(plugin); PluginResourceBundles bundles = new PluginResourceBundles(plugin, config.getLocale(), plugin.getDataFolder()); cmdMessages = bundles.get("commands"); messages = bundles.get("messages"); } }
Clear ResourceBundle cache on reload
Clear ResourceBundle cache on reload
Java
mit
aumgn/DACv2,Loscillo/DACv2-temp
java
## Code Before: package fr.aumgn.dac2; import fr.aumgn.bukkitutils.localization.PluginResourceBundles; import fr.aumgn.bukkitutils.localization.bundle.PluginResourceBundle; import fr.aumgn.dac2.config.DACConfig; public class DAC { private final DACPlugin plugin; private DACConfig config; private PluginResourceBundle cmdMessages; private PluginResourceBundle messages; public DAC(DACPlugin plugin) { this.plugin = plugin; reloadData(); } public DACPlugin getPlugin() { return plugin; } public DACConfig getConfig() { return config; } public PluginResourceBundle getCmdMessages() { return cmdMessages; } public PluginResourceBundle getMessages() { return messages; } public void reloadData() { config = plugin.reloadDACConfig(); PluginResourceBundles bundles = new PluginResourceBundles(plugin, config.getLocale(), plugin.getDataFolder()); cmdMessages = bundles.get("commands"); messages = bundles.get("messages"); } } ## Instruction: Clear ResourceBundle cache on reload ## Code After: package fr.aumgn.dac2; import fr.aumgn.bukkitutils.localization.PluginResourceBundles; import fr.aumgn.bukkitutils.localization.bundle.PluginResourceBundle; import fr.aumgn.dac2.config.DACConfig; public class DAC { private final DACPlugin plugin; private DACConfig config; private PluginResourceBundle cmdMessages; private PluginResourceBundle messages; public DAC(DACPlugin plugin) { this.plugin = plugin; reloadData(); } public DACPlugin getPlugin() { return plugin; } public DACConfig getConfig() { return config; } public PluginResourceBundle getCmdMessages() { return cmdMessages; } public PluginResourceBundle getMessages() { return messages; } public void reloadData() { config = plugin.reloadDACConfig(); PluginResourceBundles.clearCache(plugin); PluginResourceBundles bundles = new PluginResourceBundles(plugin, config.getLocale(), plugin.getDataFolder()); cmdMessages = bundles.get("commands"); messages = bundles.get("messages"); } }
... public void reloadData() { config = plugin.reloadDACConfig(); PluginResourceBundles.clearCache(plugin); PluginResourceBundles bundles = new PluginResourceBundles(plugin, config.getLocale(), plugin.getDataFolder()); cmdMessages = bundles.get("commands"); ...
088a3aedb31e5abdf0113f1259a2ef70eac6ee60
src/bin/e_int_config_modules.h
src/bin/e_int_config_modules.h
/* * vim:ts=8:sw=3:sts=8:noexpandtab:cino=>5n-3f0^-2{2 */ #ifdef E_TYPEDEFS #else #ifndef E_INT_CONFIG_MODULES_H #define E_INT_CONFIG_MODULES_H EAPI E_Config_Dialog *e_int_config_modules(E_Container *con, const char *params __UNUSED__); #endif #endif
/* * vim:ts=8:sw=3:sts=8:noexpandtab:cino=>5n-3f0^-2{2 */ #ifdef E_TYPEDEFS #else #ifndef E_INT_CONFIG_MODULES_H #define E_INT_CONFIG_MODULES_H EAPI E_Config_Dialog *e_int_config_modules(E_Container *con, const char *params); #endif #endif
Remove __UNUSED__ as it doesn't make sense here.
Remove __UNUSED__ as it doesn't make sense here. git-svn-id: 6ac5796aeae0cef97fb47bcc287d4ce899c6fa6e@38188 7cbeb6ba-43b4-40fd-8cce-4c39aea84d33
C
bsd-2-clause
jordemort/e17,jordemort/e17,jordemort/e17
c
## Code Before: /* * vim:ts=8:sw=3:sts=8:noexpandtab:cino=>5n-3f0^-2{2 */ #ifdef E_TYPEDEFS #else #ifndef E_INT_CONFIG_MODULES_H #define E_INT_CONFIG_MODULES_H EAPI E_Config_Dialog *e_int_config_modules(E_Container *con, const char *params __UNUSED__); #endif #endif ## Instruction: Remove __UNUSED__ as it doesn't make sense here. git-svn-id: 6ac5796aeae0cef97fb47bcc287d4ce899c6fa6e@38188 7cbeb6ba-43b4-40fd-8cce-4c39aea84d33 ## Code After: /* * vim:ts=8:sw=3:sts=8:noexpandtab:cino=>5n-3f0^-2{2 */ #ifdef E_TYPEDEFS #else #ifndef E_INT_CONFIG_MODULES_H #define E_INT_CONFIG_MODULES_H EAPI E_Config_Dialog *e_int_config_modules(E_Container *con, const char *params); #endif #endif
// ... existing code ... #ifndef E_INT_CONFIG_MODULES_H #define E_INT_CONFIG_MODULES_H EAPI E_Config_Dialog *e_int_config_modules(E_Container *con, const char *params); #endif #endif // ... rest of the code ...
0039eefbfa546f24b3f10031e664341d60e4055c
ranger/commands.py
ranger/commands.py
from ranger.api.commands import Command class fzf_select(Command): """ :fzf_select Find a file using fzf. With a prefix argument select only directories. See: https://github.com/junegunn/fzf """ def execute(self): import subprocess import os.path if self.quantifier: # match only directories command="fd -t d --hidden | fzf +m" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -type d -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" else: # match files and directories command="fd --hidden | fzf +m" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" fzf = self.fm.execute_command(command, universal_newlines=True, stdout=subprocess.PIPE) stdout, stderr = fzf.communicate() if fzf.returncode == 0: fzf_file = os.path.abspath(stdout.rstrip('\n')) if os.path.isdir(fzf_file): self.fm.cd(fzf_file) else: self.fm.select_file(fzf_file)
from ranger.api.commands import Command class fzf_select(Command): """ :fzf_select Find a file using fzf. With a prefix argument select only directories. See: https://github.com/junegunn/fzf """ def execute(self): import subprocess import os.path if self.quantifier: # match only directories command="fd -t d --hidden | fzf +m --preview 'cat {}'" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -type d -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" else: # match files and directories command="fd --hidden | fzf +m --preview 'cat {}'" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" fzf = self.fm.execute_command(command, universal_newlines=True, stdout=subprocess.PIPE) stdout, stderr = fzf.communicate() if fzf.returncode == 0: fzf_file = os.path.abspath(stdout.rstrip('\n')) if os.path.isdir(fzf_file): self.fm.cd(fzf_file) else: self.fm.select_file(fzf_file)
Use previews in ranger fzf
Use previews in ranger fzf
Python
mit
darthdeus/dotfiles,darthdeus/dotfiles,darthdeus/dotfiles,darthdeus/dotfiles
python
## Code Before: from ranger.api.commands import Command class fzf_select(Command): """ :fzf_select Find a file using fzf. With a prefix argument select only directories. See: https://github.com/junegunn/fzf """ def execute(self): import subprocess import os.path if self.quantifier: # match only directories command="fd -t d --hidden | fzf +m" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -type d -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" else: # match files and directories command="fd --hidden | fzf +m" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" fzf = self.fm.execute_command(command, universal_newlines=True, stdout=subprocess.PIPE) stdout, stderr = fzf.communicate() if fzf.returncode == 0: fzf_file = os.path.abspath(stdout.rstrip('\n')) if os.path.isdir(fzf_file): self.fm.cd(fzf_file) else: self.fm.select_file(fzf_file) ## Instruction: Use previews in ranger fzf ## Code After: from ranger.api.commands import Command class fzf_select(Command): """ :fzf_select Find a file using fzf. With a prefix argument select only directories. See: https://github.com/junegunn/fzf """ def execute(self): import subprocess import os.path if self.quantifier: # match only directories command="fd -t d --hidden | fzf +m --preview 'cat {}'" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -type d -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" else: # match files and directories command="fd --hidden | fzf +m --preview 'cat {}'" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" fzf = self.fm.execute_command(command, universal_newlines=True, stdout=subprocess.PIPE) stdout, stderr = fzf.communicate() if fzf.returncode == 0: fzf_file = os.path.abspath(stdout.rstrip('\n')) if os.path.isdir(fzf_file): self.fm.cd(fzf_file) else: self.fm.select_file(fzf_file)
// ... existing code ... import os.path if self.quantifier: # match only directories command="fd -t d --hidden | fzf +m --preview 'cat {}'" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -type d -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" else: # match files and directories command="fd --hidden | fzf +m --preview 'cat {}'" # command="find -L . \( -path '*/\.*' -o -fstype 'dev' -o -fstype 'proc' \) -prune \ # -o -print 2> /dev/null | sed 1d | cut -b3- | fzf +m" fzf = self.fm.execute_command(command, universal_newlines=True, stdout=subprocess.PIPE) // ... rest of the code ...
a552e00553189ea2a39c0bc5ff7ea463d6c64a2a
src/main/java/ws/biotea/ld2rdf/rdf/model/BaseAnnotation.java
src/main/java/ws/biotea/ld2rdf/rdf/model/BaseAnnotation.java
package ws.biotea.ld2rdf.rdf.model; import ws.biotea.ld2rdf.util.OntologyPrefix; import java.io.Serializable; /** * OpenAnnotation: This class represents a general annotation on a Document. * @author leylajael */ public class BaseAnnotation implements Serializable { private static final long serialVersionUID = 1L; /* OWL Descriptors */ public final static String ANNOTATION_DP_LABEL = OntologyPrefix.RDFS.getURL() + "label"; public final static String RDFS_COMMENT = OntologyPrefix.RDFS.getURL() + "comment"; public final static String BIOTEA_OCURRENCES = OntologyPrefix.BIOTEA.getURL() + "tf"; public final static String BIOTEA_IDF = OntologyPrefix.BIOTEA.getURL() + "idf"; public final static String RDFS_SEE_ALSO = OntologyPrefix.RDFS.getURL() + "seeAlso"; public final static String OWL_SAME_AS = OntologyPrefix.OWL.getURL() + "sameAs"; public final static String DC_IS_REFERENCED_BY = OntologyPrefix.DCTERMS.getURL() + "isReferencedBy"; public final static String VOID_IN_DATASET = OntologyPrefix.VOID.getURL() + "inDataset"; public final static String DP_SCORE = OntologyPrefix.BIOTEA.getURL() + "score"; }
package ws.biotea.ld2rdf.rdf.model; import ws.biotea.ld2rdf.util.OntologyPrefix; import java.io.Serializable; /** * OpenAnnotation: This class represents a general annotation on a Document. * @author leylajael */ public class BaseAnnotation implements Serializable { private static final long serialVersionUID = 1L; /* OWL Descriptors */ public final static String ANNOTATION_DP_LABEL = OntologyPrefix.RDFS.getURL() + "label"; public final static String RDFS_COMMENT = OntologyPrefix.RDFS.getURL() + "comment"; public final static String BIOTEA_OCURRENCES = OntologyPrefix.BIOTEA.getURL() + "tf"; public final static String BIOTEA_IDF = OntologyPrefix.BIOTEA.getURL() + "idf"; public final static String RDFS_SEE_ALSO = OntologyPrefix.RDFS.getURL() + "seeAlso"; public final static String OWL_SAME_AS = OntologyPrefix.OWL.getURL() + "sameAs"; public final static String DC_IS_REFERENCED_BY = OntologyPrefix.DCTERMS.getURL() + "isReferencedBy"; public final static String SIO_IN_DATASET = OntologyPrefix.SIO.getURL() + "001278"; public final static String DP_SCORE = OntologyPrefix.BIOTEA.getURL() + "score"; }
Use SIO:is_data _item in rather than void:inDataset
Use SIO:is_data _item in rather than void:inDataset
Java
apache-2.0
biotea/biotea-ao
java
## Code Before: package ws.biotea.ld2rdf.rdf.model; import ws.biotea.ld2rdf.util.OntologyPrefix; import java.io.Serializable; /** * OpenAnnotation: This class represents a general annotation on a Document. * @author leylajael */ public class BaseAnnotation implements Serializable { private static final long serialVersionUID = 1L; /* OWL Descriptors */ public final static String ANNOTATION_DP_LABEL = OntologyPrefix.RDFS.getURL() + "label"; public final static String RDFS_COMMENT = OntologyPrefix.RDFS.getURL() + "comment"; public final static String BIOTEA_OCURRENCES = OntologyPrefix.BIOTEA.getURL() + "tf"; public final static String BIOTEA_IDF = OntologyPrefix.BIOTEA.getURL() + "idf"; public final static String RDFS_SEE_ALSO = OntologyPrefix.RDFS.getURL() + "seeAlso"; public final static String OWL_SAME_AS = OntologyPrefix.OWL.getURL() + "sameAs"; public final static String DC_IS_REFERENCED_BY = OntologyPrefix.DCTERMS.getURL() + "isReferencedBy"; public final static String VOID_IN_DATASET = OntologyPrefix.VOID.getURL() + "inDataset"; public final static String DP_SCORE = OntologyPrefix.BIOTEA.getURL() + "score"; } ## Instruction: Use SIO:is_data _item in rather than void:inDataset ## Code After: package ws.biotea.ld2rdf.rdf.model; import ws.biotea.ld2rdf.util.OntologyPrefix; import java.io.Serializable; /** * OpenAnnotation: This class represents a general annotation on a Document. * @author leylajael */ public class BaseAnnotation implements Serializable { private static final long serialVersionUID = 1L; /* OWL Descriptors */ public final static String ANNOTATION_DP_LABEL = OntologyPrefix.RDFS.getURL() + "label"; public final static String RDFS_COMMENT = OntologyPrefix.RDFS.getURL() + "comment"; public final static String BIOTEA_OCURRENCES = OntologyPrefix.BIOTEA.getURL() + "tf"; public final static String BIOTEA_IDF = OntologyPrefix.BIOTEA.getURL() + "idf"; public final static String RDFS_SEE_ALSO = OntologyPrefix.RDFS.getURL() + "seeAlso"; public final static String OWL_SAME_AS = OntologyPrefix.OWL.getURL() + "sameAs"; public final static String DC_IS_REFERENCED_BY = OntologyPrefix.DCTERMS.getURL() + "isReferencedBy"; public final static String SIO_IN_DATASET = OntologyPrefix.SIO.getURL() + "001278"; public final static String DP_SCORE = OntologyPrefix.BIOTEA.getURL() + "score"; }
# ... existing code ... public final static String RDFS_SEE_ALSO = OntologyPrefix.RDFS.getURL() + "seeAlso"; public final static String OWL_SAME_AS = OntologyPrefix.OWL.getURL() + "sameAs"; public final static String DC_IS_REFERENCED_BY = OntologyPrefix.DCTERMS.getURL() + "isReferencedBy"; public final static String SIO_IN_DATASET = OntologyPrefix.SIO.getURL() + "001278"; public final static String DP_SCORE = OntologyPrefix.BIOTEA.getURL() + "score"; } # ... rest of the code ...
cd374366dc6d49cc543a037fba8398e5b724c382
tabula/util.py
tabula/util.py
import warnings import platform def deprecated(func): """This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emmitted when the function is used.""" def newFunc(*args, **kwargs): warnings.warn("Call to deprecated function {}.".format(func.__name__), category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc def deprecated_option(option): warnings.warn("Call to deprecated option {}.".format(option), category=DeprecationWarning, stacklevel=2) def java_version(): import subprocess try: res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT) res = res.decode() except subprocess.CalledProcessError as e: res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`" return res def environment_info(): import sys import distro import textwrap from .__version__ import __version__ print("""Python version: {} Java version: {} tabula-py version: {} platform: {} uname: {} linux_distribution: {} mac_ver: {} """.format( sys.version, textwrap.indent(java_version().strip(), " "), __version__, platform.platform(), str(platform.uname()), distro.linux_distribution(), platform.mac_ver(), ))
import warnings import platform def deprecated(func): """This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emmitted when the function is used.""" def newFunc(*args, **kwargs): warnings.warn("Call to deprecated function {}.".format(func.__name__), category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc def deprecated_option(option): warnings.warn("Call to deprecated option {}.".format(option), category=DeprecationWarning, stacklevel=2) def java_version(): import subprocess try: res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT) res = res.decode() except subprocess.CalledProcessError as e: res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`" return res def environment_info(): import sys import distro from .__version__ import __version__ print("""Python version: {} Java version: {} tabula-py version: {} platform: {} uname: {} linux_distribution: {} mac_ver: {} """.format( sys.version, java_version().strip(), __version__, platform.platform(), str(platform.uname()), distro.linux_distribution(), platform.mac_ver(), ))
Remove textwrap because python 2.7 lacks indent() function
Remove textwrap because python 2.7 lacks indent() function
Python
mit
chezou/tabula-py
python
## Code Before: import warnings import platform def deprecated(func): """This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emmitted when the function is used.""" def newFunc(*args, **kwargs): warnings.warn("Call to deprecated function {}.".format(func.__name__), category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc def deprecated_option(option): warnings.warn("Call to deprecated option {}.".format(option), category=DeprecationWarning, stacklevel=2) def java_version(): import subprocess try: res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT) res = res.decode() except subprocess.CalledProcessError as e: res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`" return res def environment_info(): import sys import distro import textwrap from .__version__ import __version__ print("""Python version: {} Java version: {} tabula-py version: {} platform: {} uname: {} linux_distribution: {} mac_ver: {} """.format( sys.version, textwrap.indent(java_version().strip(), " "), __version__, platform.platform(), str(platform.uname()), distro.linux_distribution(), platform.mac_ver(), )) ## Instruction: Remove textwrap because python 2.7 lacks indent() function ## Code After: import warnings import platform def deprecated(func): """This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emmitted when the function is used.""" def newFunc(*args, **kwargs): warnings.warn("Call to deprecated function {}.".format(func.__name__), category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) newFunc.__name__ = func.__name__ newFunc.__doc__ = func.__doc__ newFunc.__dict__.update(func.__dict__) return newFunc def deprecated_option(option): warnings.warn("Call to deprecated option {}.".format(option), category=DeprecationWarning, stacklevel=2) def java_version(): import subprocess try: res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT) res = res.decode() except subprocess.CalledProcessError as e: res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`" return res def environment_info(): import sys import distro from .__version__ import __version__ print("""Python version: {} Java version: {} tabula-py version: {} platform: {} uname: {} linux_distribution: {} mac_ver: {} """.format( sys.version, java_version().strip(), __version__, platform.platform(), str(platform.uname()), distro.linux_distribution(), platform.mac_ver(), ))
# ... existing code ... def environment_info(): import sys import distro from .__version__ import __version__ print("""Python version: {} Java version: {} tabula-py version: {} platform: {} uname: # ... modified code ... mac_ver: {} """.format( sys.version, java_version().strip(), __version__, platform.platform(), str(platform.uname()), # ... rest of the code ...
c5a2c7e802d89ea17a7f0fd1a9194eaab8eaf61d
wcontrol/src/main.py
wcontrol/src/main.py
from flask import Flask app = Flask(__name__) app.config.from_object("config")
import os from flask import Flask app = Flask(__name__) app.config.from_object(os.environ.get("WCONTROL_CONF"))
Use a env var to get config
Use a env var to get config
Python
mit
pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control
python
## Code Before: from flask import Flask app = Flask(__name__) app.config.from_object("config") ## Instruction: Use a env var to get config ## Code After: import os from flask import Flask app = Flask(__name__) app.config.from_object(os.environ.get("WCONTROL_CONF"))
// ... existing code ... import os from flask import Flask app = Flask(__name__) app.config.from_object(os.environ.get("WCONTROL_CONF")) // ... rest of the code ...
12046a341b8dbd0de3fc41b3a595bf8081a602b4
src/jwampsharp.defaultBinding/src/main/java/co/codesharp/jwampsharp/defaultBinding/DefaultWampChannelFactory.java
src/jwampsharp.defaultBinding/src/main/java/co/codesharp/jwampsharp/defaultBinding/DefaultWampChannelFactory.java
package co.codesharp.jwampsharp.defaultBinding; import co.codesharp.jwampsharp.api.client.WampChannel; import co.codesharp.jwampsharp.api.client.WampChannelFactoryImpl; import co.codesharp.jwampsharp.core.binding.WampTextBinding; import co.codesharp.jwampsharp.defaultBinding.jsr.WebsocketWampTextConnection; import java.net.URI; /** * Created by Elad on 7/11/2014. */ public class DefaultWampChannelFactory extends WampChannelFactoryImpl { public <TMessage> WampChannel createChannel (URI address, String realm, WampTextBinding<TMessage> binding) { WebsocketWampTextConnection<TMessage> connection = new WebsocketWampTextConnection<TMessage>(address, binding); return this.createChannel(realm, connection, binding); } public WampChannel createJsonChannel(URI address, String realm) { JsonNodeBinding binding = new JsonNodeBinding(); return this.createChannel(address, realm, binding); } }
package co.codesharp.jwampsharp.defaultBinding; import co.codesharp.jwampsharp.api.client.WampChannel; import co.codesharp.jwampsharp.api.client.WampChannelFactoryImpl; import co.codesharp.jwampsharp.core.binding.WampTextBinding; import co.codesharp.jwampsharp.defaultBinding.jsr.WebsocketWampTextConnection; import java.net.URI; /** * Created by Elad on 7/11/2014. */ public class DefaultWampChannelFactory extends WampChannelFactoryImpl { private JsonNodeBinding jsonNodeBinding = new JsonNodeBinding(); public <TMessage> WampChannel createChannel (URI address, String realm, WampTextBinding<TMessage> binding) { WebsocketWampTextConnection<TMessage> connection = new WebsocketWampTextConnection<TMessage>(address, binding); return this.createChannel(realm, connection, binding); } public WampChannel createJsonChannel(URI address, String realm) { return this.createChannel(address, realm, jsonNodeBinding); } }
Use the same reference for all method calls
Use the same reference for all method calls
Java
bsd-2-clause
Code-Sharp/JWampSharp
java
## Code Before: package co.codesharp.jwampsharp.defaultBinding; import co.codesharp.jwampsharp.api.client.WampChannel; import co.codesharp.jwampsharp.api.client.WampChannelFactoryImpl; import co.codesharp.jwampsharp.core.binding.WampTextBinding; import co.codesharp.jwampsharp.defaultBinding.jsr.WebsocketWampTextConnection; import java.net.URI; /** * Created by Elad on 7/11/2014. */ public class DefaultWampChannelFactory extends WampChannelFactoryImpl { public <TMessage> WampChannel createChannel (URI address, String realm, WampTextBinding<TMessage> binding) { WebsocketWampTextConnection<TMessage> connection = new WebsocketWampTextConnection<TMessage>(address, binding); return this.createChannel(realm, connection, binding); } public WampChannel createJsonChannel(URI address, String realm) { JsonNodeBinding binding = new JsonNodeBinding(); return this.createChannel(address, realm, binding); } } ## Instruction: Use the same reference for all method calls ## Code After: package co.codesharp.jwampsharp.defaultBinding; import co.codesharp.jwampsharp.api.client.WampChannel; import co.codesharp.jwampsharp.api.client.WampChannelFactoryImpl; import co.codesharp.jwampsharp.core.binding.WampTextBinding; import co.codesharp.jwampsharp.defaultBinding.jsr.WebsocketWampTextConnection; import java.net.URI; /** * Created by Elad on 7/11/2014. */ public class DefaultWampChannelFactory extends WampChannelFactoryImpl { private JsonNodeBinding jsonNodeBinding = new JsonNodeBinding(); public <TMessage> WampChannel createChannel (URI address, String realm, WampTextBinding<TMessage> binding) { WebsocketWampTextConnection<TMessage> connection = new WebsocketWampTextConnection<TMessage>(address, binding); return this.createChannel(realm, connection, binding); } public WampChannel createJsonChannel(URI address, String realm) { return this.createChannel(address, realm, jsonNodeBinding); } }
# ... existing code ... * Created by Elad on 7/11/2014. */ public class DefaultWampChannelFactory extends WampChannelFactoryImpl { private JsonNodeBinding jsonNodeBinding = new JsonNodeBinding(); public <TMessage> WampChannel createChannel (URI address, String realm, # ... modified code ... public WampChannel createJsonChannel(URI address, String realm) { return this.createChannel(address, realm, jsonNodeBinding); } } # ... rest of the code ...
0ce14be170e09530b225f2f7526ad68ee1758095
peering/migrations/0027_auto_20190105_1600.py
peering/migrations/0027_auto_20190105_1600.py
import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ( "peering", "0026_autonomoussystem_potential_internet_exchange_peering_sessions", ) ] operations = [ migrations.AlterField( model_name="autonomoussystem", name="potential_internet_exchange_peering_sessions", field=django.contrib.postgres.fields.ArrayField( base_field=models.GenericIPAddressField(), blank=True, default=list, size=None, ), ) ]
import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ( "peering", "0026_autonomoussystem_potential_internet_exchange_peering_sessions", ) ] def forwards_func(apps, schema_editor): AutonomousSystem = apps.get_model("peering", "AutonomousSystem") db_alias = schema_editor.connection.alias AutonomousSystem.objects.using(db_alias).filter( potential_internet_exchange_peering_sessions=None ).update(potential_internet_exchange_peering_sessions=[]) def reverse_func(apps, schema_editor): AutonomousSystem = apps.get_model("peering", "AutonomousSystem") db_alias = schema_editor.connection.alias AutonomousSystem.objects.using(db_alias).filter( potential_internet_exchange_peering_sessions=[] ).update(potential_internet_exchange_peering_sessions=None) operations = [ migrations.AlterField( model_name="autonomoussystem", name="potential_internet_exchange_peering_sessions", field=django.contrib.postgres.fields.ArrayField( base_field=models.GenericIPAddressField(), blank=True, default=list, size=None, ), ), migrations.RunPython(forwards_func, reverse_func), ]
Fix issue with migrations introduced lately.
Fix issue with migrations introduced lately.
Python
apache-2.0
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
python
## Code Before: import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ( "peering", "0026_autonomoussystem_potential_internet_exchange_peering_sessions", ) ] operations = [ migrations.AlterField( model_name="autonomoussystem", name="potential_internet_exchange_peering_sessions", field=django.contrib.postgres.fields.ArrayField( base_field=models.GenericIPAddressField(), blank=True, default=list, size=None, ), ) ] ## Instruction: Fix issue with migrations introduced lately. ## Code After: import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ( "peering", "0026_autonomoussystem_potential_internet_exchange_peering_sessions", ) ] def forwards_func(apps, schema_editor): AutonomousSystem = apps.get_model("peering", "AutonomousSystem") db_alias = schema_editor.connection.alias AutonomousSystem.objects.using(db_alias).filter( potential_internet_exchange_peering_sessions=None ).update(potential_internet_exchange_peering_sessions=[]) def reverse_func(apps, schema_editor): AutonomousSystem = apps.get_model("peering", "AutonomousSystem") db_alias = schema_editor.connection.alias AutonomousSystem.objects.using(db_alias).filter( potential_internet_exchange_peering_sessions=[] ).update(potential_internet_exchange_peering_sessions=None) operations = [ migrations.AlterField( model_name="autonomoussystem", name="potential_internet_exchange_peering_sessions", field=django.contrib.postgres.fields.ArrayField( base_field=models.GenericIPAddressField(), blank=True, default=list, size=None, ), ), migrations.RunPython(forwards_func, reverse_func), ]
// ... existing code ... ) ] def forwards_func(apps, schema_editor): AutonomousSystem = apps.get_model("peering", "AutonomousSystem") db_alias = schema_editor.connection.alias AutonomousSystem.objects.using(db_alias).filter( potential_internet_exchange_peering_sessions=None ).update(potential_internet_exchange_peering_sessions=[]) def reverse_func(apps, schema_editor): AutonomousSystem = apps.get_model("peering", "AutonomousSystem") db_alias = schema_editor.connection.alias AutonomousSystem.objects.using(db_alias).filter( potential_internet_exchange_peering_sessions=[] ).update(potential_internet_exchange_peering_sessions=None) operations = [ migrations.AlterField( model_name="autonomoussystem", // ... modified code ... default=list, size=None, ), ), migrations.RunPython(forwards_func, reverse_func), ] // ... rest of the code ...
61de7c1827867cea3385c5db3862e5e68caa98fd
Puli/src/octopus/dispatcher/rules/graphview.py
Puli/src/octopus/dispatcher/rules/graphview.py
from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup from octopus.dispatcher import rules import logging logger = logging.getLogger("dispatcher") class RuleError(rules.RuleError): '''Base class for GraphViewBuilder related exceptions.''' pass class TaskNodeHasNoChildrenError(RuleError): '''Raised when a GraphViewBuilder is requested to add a child node to a FolderNode. ''' class GraphViewBuilder(object): def __init__(self, dispatchTree, root): self.dispatchTree = dispatchTree self.root = root def apply(self, task): id = None name = task.name parent = task.parent.nodes['graph_rule'] if task.parent else self.root user = task.user priority = task.priority dispatchKey = task.dispatchKey maxRN = task.maxRN if isinstance(task, TaskGroup): strategy = task.strategy node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN, strategy, taskGroup=task) else: node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task) task.nodes['graph_rule'] = node return [node] def processDependencies(self, dependencies): for task, taskdeps in dependencies.items(): node = task.nodes['graph_rule'] for deptask, statuslist in taskdeps.items(): depnode = deptask.nodes['graph_rule'] node.addDependency(depnode, statuslist)
from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup from octopus.dispatcher import rules import logging logger = logging.getLogger("dispatcher") class RuleError(rules.RuleError): '''Base class for GraphViewBuilder related exceptions.''' pass class TaskNodeHasNoChildrenError(RuleError): '''Raised when a GraphViewBuilder is requested to add a child node to a FolderNode. ''' class GraphViewBuilder(object): def __init__(self, dispatchTree, root): self.dispatchTree = dispatchTree self.root = root def apply(self, task): id = None name = task.name parent = task.parent.nodes['graph_rule'] if task.parent else self.root user = task.user priority = task.priority dispatchKey = task.dispatchKey maxRN = task.maxRN if isinstance(task, TaskGroup): strategy = task.strategy node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN, strategy, taskGroup=task) else: node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task) task.nodes['graph_rule'] = node return [node] def processDependencies(self, dependencies): for task, taskdeps in dependencies.items(): node = task.nodes['graph_rule'] for deptask, statuslist in taskdeps.items(): depnode = deptask.nodes['graph_rule'] node.addDependency(depnode, statuslist) def __repr__(self): return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (self.root, self.dispatchTree )
Add a representation of GraphView object
Add a representation of GraphView object
Python
bsd-3-clause
mikrosimage/OpenRenderManagement,mikrosimage/OpenRenderManagement,smaragden/OpenRenderManagement,smaragden/OpenRenderManagement,smaragden/OpenRenderManagement,mikrosimage/OpenRenderManagement
python
## Code Before: from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup from octopus.dispatcher import rules import logging logger = logging.getLogger("dispatcher") class RuleError(rules.RuleError): '''Base class for GraphViewBuilder related exceptions.''' pass class TaskNodeHasNoChildrenError(RuleError): '''Raised when a GraphViewBuilder is requested to add a child node to a FolderNode. ''' class GraphViewBuilder(object): def __init__(self, dispatchTree, root): self.dispatchTree = dispatchTree self.root = root def apply(self, task): id = None name = task.name parent = task.parent.nodes['graph_rule'] if task.parent else self.root user = task.user priority = task.priority dispatchKey = task.dispatchKey maxRN = task.maxRN if isinstance(task, TaskGroup): strategy = task.strategy node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN, strategy, taskGroup=task) else: node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task) task.nodes['graph_rule'] = node return [node] def processDependencies(self, dependencies): for task, taskdeps in dependencies.items(): node = task.nodes['graph_rule'] for deptask, statuslist in taskdeps.items(): depnode = deptask.nodes['graph_rule'] node.addDependency(depnode, statuslist) ## Instruction: Add a representation of GraphView object ## Code After: from octopus.dispatcher.model import TaskNode, FolderNode, TaskGroup from octopus.dispatcher import rules import logging logger = logging.getLogger("dispatcher") class RuleError(rules.RuleError): '''Base class for GraphViewBuilder related exceptions.''' pass class TaskNodeHasNoChildrenError(RuleError): '''Raised when a GraphViewBuilder is requested to add a child node to a FolderNode. ''' class GraphViewBuilder(object): def __init__(self, dispatchTree, root): self.dispatchTree = dispatchTree self.root = root def apply(self, task): id = None name = task.name parent = task.parent.nodes['graph_rule'] if task.parent else self.root user = task.user priority = task.priority dispatchKey = task.dispatchKey maxRN = task.maxRN if isinstance(task, TaskGroup): strategy = task.strategy node = FolderNode(id, name, parent, user, priority, dispatchKey, maxRN, strategy, taskGroup=task) else: node = TaskNode(None, name, parent, user, priority, dispatchKey, maxRN, task) task.nodes['graph_rule'] = node return [node] def processDependencies(self, dependencies): for task, taskdeps in dependencies.items(): node = task.nodes['graph_rule'] for deptask, statuslist in taskdeps.items(): depnode = deptask.nodes['graph_rule'] node.addDependency(depnode, statuslist) def __repr__(self): return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (self.root, self.dispatchTree )
// ... existing code ... for deptask, statuslist in taskdeps.items(): depnode = deptask.nodes['graph_rule'] node.addDependency(depnode, statuslist) def __repr__(self): return "GraphViewBuilder( root=%r, dispatchTree=%r )" % (self.root, self.dispatchTree ) // ... rest of the code ...
fcced317586bf23cab4226f796da28b531e8684a
src/main/kotlin/de/axelrindle/simplecoins/command/ReloadCommand.kt
src/main/kotlin/de/axelrindle/simplecoins/command/ReloadCommand.kt
package de.axelrindle.simplecoins.command import de.axelrindle.pocketknife.PocketCommand import de.axelrindle.simplecoins.CoinManager import de.axelrindle.simplecoins.SimpleCoins import org.bukkit.command.Command import org.bukkit.command.CommandSender internal class ReloadCommand : PocketCommand() { override fun getName(): String { return "reload" } override fun getDescription(): String { return "Reloads the configuration from disk." } override fun getPermission(): String { return "simplecoins.reload" } override fun getUsage(): String { return "/simplecoins reload" } override fun handle(sender: CommandSender, command: Command, args: Array<out String>): Boolean { sender.sendMessage("${SimpleCoins.prefix} §bReloading...") try { // unload the CoinManager CoinManager.close() // reload the config files and re-init the CoinManager SimpleCoins.instance!!.pocketConfig.apply { reload("config") reload("database") } CoinManager.init(SimpleCoins.instance!!.pocketConfig) sender.sendMessage("${SimpleCoins.prefix} §aDone.") } catch (e: Exception) { sender.sendMessage("${SimpleCoins.prefix} §cSomething went wrong! Check the console for more information.") e.printStackTrace() } return true } override fun sendHelp(sender: CommandSender) { sender.sendMessage(getUsage()) } }
package de.axelrindle.simplecoins.command import de.axelrindle.pocketknife.PocketCommand import de.axelrindle.simplecoins.CoinManager import de.axelrindle.simplecoins.SimpleCoins import org.bukkit.command.Command import org.bukkit.command.CommandSender internal class ReloadCommand : PocketCommand() { override fun getName(): String { return "reload" } override fun getDescription(): String { return "Reloads the configuration from disk." } override fun getPermission(): String { return "simplecoins.reload" } override fun getUsage(): String { return "/simplecoins reload" } override fun handle(sender: CommandSender, command: Command, args: Array<out String>): Boolean { sender.sendMessage("${SimpleCoins.prefix} §bReloading...") try { // unload the CoinManager CoinManager.close() // reload the config files and re-init the CoinManager SimpleCoins.instance!!.apply { pocketConfig.reloadAll() CoinManager.init(pocketConfig) } sender.sendMessage("${SimpleCoins.prefix} §aDone.") } catch (e: Exception) { sender.sendMessage("${SimpleCoins.prefix} §cSomething went wrong! Check the console for more information.") e.printStackTrace() } return true } override fun sendHelp(sender: CommandSender) { sender.sendMessage(getUsage()) } }
Use reloadAll() to reload all config files
Use reloadAll() to reload all config files
Kotlin
mit
axelrindle/SimpleCoins
kotlin
## Code Before: package de.axelrindle.simplecoins.command import de.axelrindle.pocketknife.PocketCommand import de.axelrindle.simplecoins.CoinManager import de.axelrindle.simplecoins.SimpleCoins import org.bukkit.command.Command import org.bukkit.command.CommandSender internal class ReloadCommand : PocketCommand() { override fun getName(): String { return "reload" } override fun getDescription(): String { return "Reloads the configuration from disk." } override fun getPermission(): String { return "simplecoins.reload" } override fun getUsage(): String { return "/simplecoins reload" } override fun handle(sender: CommandSender, command: Command, args: Array<out String>): Boolean { sender.sendMessage("${SimpleCoins.prefix} §bReloading...") try { // unload the CoinManager CoinManager.close() // reload the config files and re-init the CoinManager SimpleCoins.instance!!.pocketConfig.apply { reload("config") reload("database") } CoinManager.init(SimpleCoins.instance!!.pocketConfig) sender.sendMessage("${SimpleCoins.prefix} §aDone.") } catch (e: Exception) { sender.sendMessage("${SimpleCoins.prefix} §cSomething went wrong! Check the console for more information.") e.printStackTrace() } return true } override fun sendHelp(sender: CommandSender) { sender.sendMessage(getUsage()) } } ## Instruction: Use reloadAll() to reload all config files ## Code After: package de.axelrindle.simplecoins.command import de.axelrindle.pocketknife.PocketCommand import de.axelrindle.simplecoins.CoinManager import de.axelrindle.simplecoins.SimpleCoins import org.bukkit.command.Command import org.bukkit.command.CommandSender internal class ReloadCommand : PocketCommand() { override fun getName(): String { return "reload" } override fun getDescription(): String { return "Reloads the configuration from disk." } override fun getPermission(): String { return "simplecoins.reload" } override fun getUsage(): String { return "/simplecoins reload" } override fun handle(sender: CommandSender, command: Command, args: Array<out String>): Boolean { sender.sendMessage("${SimpleCoins.prefix} §bReloading...") try { // unload the CoinManager CoinManager.close() // reload the config files and re-init the CoinManager SimpleCoins.instance!!.apply { pocketConfig.reloadAll() CoinManager.init(pocketConfig) } sender.sendMessage("${SimpleCoins.prefix} §aDone.") } catch (e: Exception) { sender.sendMessage("${SimpleCoins.prefix} §cSomething went wrong! Check the console for more information.") e.printStackTrace() } return true } override fun sendHelp(sender: CommandSender) { sender.sendMessage(getUsage()) } }
# ... existing code ... CoinManager.close() // reload the config files and re-init the CoinManager SimpleCoins.instance!!.apply { pocketConfig.reloadAll() CoinManager.init(pocketConfig) } sender.sendMessage("${SimpleCoins.prefix} §aDone.") } catch (e: Exception) { # ... rest of the code ...
af88a4da720094eb576f50664fa61d449eb005dd
include/rapidcheck/detail/Traits.h
include/rapidcheck/detail/Traits.h
namespace rc { namespace detail { namespace sfinae { template<typename T, typename = decltype(std::declval<T>() == std::declval<T>())> std::true_type isEqualityComparable(const T &); std::false_type isEqualityComparable(...); template<typename T, typename = decltype(std::cout << std::declval<T>())> std::true_type isStreamInsertible(const T &); std::false_type isStreamInsertible(...); } // namespace sfinae template<typename T> using IsEqualityComparable = decltype( sfinae::isEqualityComparable(std::declval<T>())); template<typename T> using IsStreamInsertible = decltype( sfinae::isStreamInsertible(std::declval<T>())); } // namespace detail } // namespace rc
namespace rc { namespace detail { #define RC_SFINAE_TRAIT(Name, expression) \ namespace sfinae { \ template<typename T, typename = expression> \ std::true_type test##Name(const T &); \ std::false_type test##Name(...); \ } \ \ template<typename T> \ using Name = decltype(sfinae::test##Name(std::declval<T>())); RC_SFINAE_TRAIT(IsEqualityComparable, decltype(std::declval<T>() == std::declval<T>())) RC_SFINAE_TRAIT(IsStreamInsertible, decltype(std::cout << std::declval<T>())) } // namespace detail } // namespace rc
Add macro to declare SFINAE based traits
Add macro to declare SFINAE based traits
C
bsd-2-clause
unapiedra/rapidfuzz,whoshuu/rapidcheck,unapiedra/rapidfuzz,whoshuu/rapidcheck,tm604/rapidcheck,emil-e/rapidcheck,tm604/rapidcheck,whoshuu/rapidcheck,emil-e/rapidcheck,emil-e/rapidcheck,tm604/rapidcheck,unapiedra/rapidfuzz
c
## Code Before: namespace rc { namespace detail { namespace sfinae { template<typename T, typename = decltype(std::declval<T>() == std::declval<T>())> std::true_type isEqualityComparable(const T &); std::false_type isEqualityComparable(...); template<typename T, typename = decltype(std::cout << std::declval<T>())> std::true_type isStreamInsertible(const T &); std::false_type isStreamInsertible(...); } // namespace sfinae template<typename T> using IsEqualityComparable = decltype( sfinae::isEqualityComparable(std::declval<T>())); template<typename T> using IsStreamInsertible = decltype( sfinae::isStreamInsertible(std::declval<T>())); } // namespace detail } // namespace rc ## Instruction: Add macro to declare SFINAE based traits ## Code After: namespace rc { namespace detail { #define RC_SFINAE_TRAIT(Name, expression) \ namespace sfinae { \ template<typename T, typename = expression> \ std::true_type test##Name(const T &); \ std::false_type test##Name(...); \ } \ \ template<typename T> \ using Name = decltype(sfinae::test##Name(std::declval<T>())); RC_SFINAE_TRAIT(IsEqualityComparable, decltype(std::declval<T>() == std::declval<T>())) RC_SFINAE_TRAIT(IsStreamInsertible, decltype(std::cout << std::declval<T>())) } // namespace detail } // namespace rc
# ... existing code ... namespace rc { namespace detail { #define RC_SFINAE_TRAIT(Name, expression) \ namespace sfinae { \ template<typename T, typename = expression> \ std::true_type test##Name(const T &); \ std::false_type test##Name(...); \ } \ \ template<typename T> \ using Name = decltype(sfinae::test##Name(std::declval<T>())); RC_SFINAE_TRAIT(IsEqualityComparable, decltype(std::declval<T>() == std::declval<T>())) RC_SFINAE_TRAIT(IsStreamInsertible, decltype(std::cout << std::declval<T>())) } // namespace detail } // namespace rc # ... rest of the code ...
f9f9111ddafb7dfd0554d541befd3cc660169689
apps/redirects/urls.py
apps/redirects/urls.py
from django.conf.urls.defaults import * from util import redirect urlpatterns = patterns('', redirect(r'^b2g', 'firefoxos'), redirect(r'^b2g/faq', 'firefoxos'), redirect(r'^b2g/about', 'firefoxos'), )
from django.conf.urls.defaults import * from util import redirect urlpatterns = patterns('', redirect(r'^b2g', 'firefoxos.firefoxos'), redirect(r'^b2g/faq', 'firefoxos.firefoxos'), redirect(r'^b2g/about', 'firefoxos.firefoxos'), )
Fix view name for b2g redirects
Fix view name for b2g redirects bug 792482
Python
mpl-2.0
dudepare/bedrock,rishiloyola/bedrock,mahinthjoe/bedrock,ckprice/bedrock,davehunt/bedrock,davidwboswell/documentation_autoresponse,jpetto/bedrock,dudepare/bedrock,glogiotatidis/bedrock,kyoshino/bedrock,mahinthjoe/bedrock,MichaelKohler/bedrock,ckprice/bedrock,analytics-pros/mozilla-bedrock,analytics-pros/mozilla-bedrock,MichaelKohler/bedrock,sylvestre/bedrock,CSCI-462-01-2017/bedrock,chirilo/bedrock,chirilo/bedrock,yglazko/bedrock,sgarrity/bedrock,SujaySKumar/bedrock,elin-moco/bedrock,kyoshino/bedrock,mmmavis/bedrock,jpetto/bedrock,andreadelrio/bedrock,davidwboswell/documentation_autoresponse,jpetto/bedrock,jacshfr/mozilla-bedrock,davehunt/bedrock,gauthierm/bedrock,Sancus/bedrock,ericawright/bedrock,gauthierm/bedrock,TheoChevalier/bedrock,mozilla/bedrock,gauthierm/bedrock,TheJJ100100/bedrock,ckprice/bedrock,TheoChevalier/bedrock,yglazko/bedrock,mmmavis/bedrock,flodolo/bedrock,bensternthal/bedrock,mmmavis/lightbeam-bedrock-website,glogiotatidis/bedrock,alexgibson/bedrock,glogiotatidis/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,pmclanahan/bedrock,mmmavis/lightbeam-bedrock-website,jgmize/bedrock,sylvestre/bedrock,Sancus/bedrock,malena/bedrock,mozilla/mwc,kyoshino/bedrock,Jobava/bedrock,petabyte/bedrock,schalkneethling/bedrock,sgarrity/bedrock,gerv/bedrock,pascalchevrel/bedrock,CSCI-462-01-2017/bedrock,yglazko/bedrock,mozilla/mwc,jacshfr/mozilla-bedrock,jgmize/bedrock,mozilla/bedrock,Jobava/bedrock,mmmavis/bedrock,mkmelin/bedrock,glogiotatidis/bedrock,davidwboswell/documentation_autoresponse,petabyte/bedrock,marcoscaceres/bedrock,petabyte/bedrock,mermi/bedrock,jacshfr/mozilla-bedrock,davehunt/bedrock,CSCI-462-01-2017/bedrock,chirilo/bedrock,andreadelrio/bedrock,SujaySKumar/bedrock,ericawright/bedrock,sgarrity/bedrock,analytics-pros/mozilla-bedrock,TheJJ100100/bedrock,malena/bedrock,alexgibson/bedrock,craigcook/bedrock,malena/bedrock,mmmavis/bedrock,davehunt/bedrock,rishiloyola/bedrock,l-hedgehog/bedrock,mahinthjoe/bedrock,jacshfr/mozilla-bedrock,kyoshino/bedrock,flodolo/bedrock,pmclanahan/bedrock,gauthierm/bedrock,ericawright/bedrock,sgarrity/bedrock,andreadelrio/bedrock,bensternthal/bedrock,bensternthal/bedrock,mermi/bedrock,pmclanahan/bedrock,flodolo/bedrock,yglazko/bedrock,marcoscaceres/bedrock,mkmelin/bedrock,hoosteeno/bedrock,davidwboswell/documentation_autoresponse,amjadm61/bedrock,amjadm61/bedrock,TheoChevalier/bedrock,alexgibson/bedrock,Jobava/bedrock,jgmize/bedrock,amjadm61/bedrock,dudepare/bedrock,mozilla/bedrock,elin-moco/bedrock,schalkneethling/bedrock,petabyte/bedrock,mermi/bedrock,mmmavis/lightbeam-bedrock-website,craigcook/bedrock,pmclanahan/bedrock,rishiloyola/bedrock,analytics-pros/mozilla-bedrock,gerv/bedrock,alexgibson/bedrock,SujaySKumar/bedrock,dudepare/bedrock,andreadelrio/bedrock,amjadm61/bedrock,marcoscaceres/bedrock,elin-moco/bedrock,chirilo/bedrock,jacshfr/mozilla-bedrock,l-hedgehog/bedrock,gerv/bedrock,schalkneethling/bedrock,flodolo/bedrock,pascalchevrel/bedrock,bensternthal/bedrock,mozilla/bedrock,hoosteeno/bedrock,sylvestre/bedrock,Sancus/bedrock,jgmize/bedrock,hoosteeno/bedrock,sylvestre/bedrock,mahinthjoe/bedrock,TheJJ100100/bedrock,mozilla/mwc,ericawright/bedrock,Jobava/bedrock,rishiloyola/bedrock,craigcook/bedrock,amjadm61/bedrock,gerv/bedrock,mozilla/mwc,malena/bedrock,craigcook/bedrock,TheoChevalier/bedrock,schalkneethling/bedrock,TheJJ100100/bedrock,marcoscaceres/bedrock,mermi/bedrock,SujaySKumar/bedrock,ckprice/bedrock,pascalchevrel/bedrock,Sancus/bedrock,elin-moco/bedrock,l-hedgehog/bedrock,jpetto/bedrock,CSCI-462-01-2017/bedrock,l-hedgehog/bedrock,
mkmelin/bedrock,hoosteeno/bedrock,mkmelin/bedrock,MichaelKohler/bedrock
python
## Code Before: from django.conf.urls.defaults import * from util import redirect urlpatterns = patterns('', redirect(r'^b2g', 'firefoxos'), redirect(r'^b2g/faq', 'firefoxos'), redirect(r'^b2g/about', 'firefoxos'), ) ## Instruction: Fix view name for b2g redirects bug 792482 ## Code After: from django.conf.urls.defaults import * from util import redirect urlpatterns = patterns('', redirect(r'^b2g', 'firefoxos.firefoxos'), redirect(r'^b2g/faq', 'firefoxos.firefoxos'), redirect(r'^b2g/about', 'firefoxos.firefoxos'), )
... urlpatterns = patterns('', redirect(r'^b2g', 'firefoxos.firefoxos'), redirect(r'^b2g/faq', 'firefoxos.firefoxos'), redirect(r'^b2g/about', 'firefoxos.firefoxos'), ) ...
42a523393d6ec2d5dfc80d1b82e2c703e1afa29b
calc.py
calc.py
"""calc.py: A simple calculator.""" import sys def add_all(nums): return sum(nums) def multiply_all(nums): return reduce(lambda a, b: a * b, nums) if __name__ == '__main__': command = sys.argv[1] nums = map(float, sys.argv[2:]) if command == 'add': print(add_all(nums)) elif command == 'multiply': print(multiply_all(nums)) elif command == 'min': print(min(nums)) else: usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]" print(usage)
"""calc.py: A simple calculator.""" import sys def add_all(nums): return sum(nums) def multiply_all(nums): return reduce(lambda a, b: a * b, nums) if __name__ == '__main__': command = sys.argv[1] nums = map(float, sys.argv[2:]) if command == 'add': print(add_all(nums)) elif command == 'multiply': print(multiply_all(nums)) elif command == 'min': print(min(nums)) else: usage = ("calc.py [add|multiply|min]" " NUM1 [NUM2 [NUM3 [...]]]") print(usage)
Update usage string for min
Update usage string for min
Python
bsd-3-clause
mkuiper/calc-1
python
## Code Before: """calc.py: A simple calculator.""" import sys def add_all(nums): return sum(nums) def multiply_all(nums): return reduce(lambda a, b: a * b, nums) if __name__ == '__main__': command = sys.argv[1] nums = map(float, sys.argv[2:]) if command == 'add': print(add_all(nums)) elif command == 'multiply': print(multiply_all(nums)) elif command == 'min': print(min(nums)) else: usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]" print(usage) ## Instruction: Update usage string for min ## Code After: """calc.py: A simple calculator.""" import sys def add_all(nums): return sum(nums) def multiply_all(nums): return reduce(lambda a, b: a * b, nums) if __name__ == '__main__': command = sys.argv[1] nums = map(float, sys.argv[2:]) if command == 'add': print(add_all(nums)) elif command == 'multiply': print(multiply_all(nums)) elif command == 'min': print(min(nums)) else: usage = ("calc.py [add|multiply|min]" " NUM1 [NUM2 [NUM3 [...]]]") print(usage)
// ... existing code ... elif command == 'min': print(min(nums)) else: usage = ("calc.py [add|multiply|min]" " NUM1 [NUM2 [NUM3 [...]]]") print(usage) // ... rest of the code ...
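Aside on the `reduce` call in `multiply_all` above: `reduce` is a builtin only on Python 2, so a portable variant imports it from `functools`. This is a sketch, not part of the commit, and the initial value of 1 is an added assumption so that an empty list multiplies to 1.

from functools import reduce  # required on Python 3, harmless on Python 2.6+

def multiply_all(nums):
    # Multiply every number together, starting from 1.
    return reduce(lambda a, b: a * b, nums, 1)

print(multiply_all([2, 3, 4]))  # 24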
451a0773da3aafc525e60e2a222fd4d1613589f6
tests/test_splits.py
tests/test_splits.py
from tests.base import IntegrationTest class TestBurndown(IntegrationTest): def execute(self): self.command("TaskWikiBurndownDaily") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.daily") assert "Daily Burndown" in self.read_buffer()[0]
import re from tests.base import IntegrationTest from tasklib.task import local_zone from datetime import datetime class TestBurndownDailySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownDaily") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.daily") assert "Daily Burndown" in self.read_buffer()[0] class TestBurndownMonthlySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownMonthly") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.monthly") assert "Monthly Burndown" in self.read_buffer()[0] class TestBurndownWeeklySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownWeekly") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.weekly") assert "Weekly Burndown" in self.read_buffer()[0] class TestCalendarSimple(IntegrationTest): def execute(self): self.command("TaskWikiCalendar") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer calendar") # Assert each day is displayed at least once. output = self.read_buffer() for day in map(str, range(1, 29)): assert any(day in line for line in output) class TestGhistorySimple(IntegrationTest): tasks = [ dict(description="test task"), dict(description="completed task 1", status="completed", end="now"), dict(description="completed task 2", status="completed", end="now"), dict(description="deleted task", status="deleted"), ] def execute(self): self.command("TaskWikiGhistoryAnnual") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer ghistory.annual") output = self.read_buffer() header_words = ("Year", "Number", "Added", "Completed", "Deleted") for word in header_words: assert word in output[0] legend_words = ("Legend", "Added", "Completed", "Deleted") for word in legend_words: assert re.search(word, output[-1], re.IGNORECASE) current_year = local_zone.localize(datetime.now()).year assert str(current_year) in '\n'.join(output)
Add simple tests for burndown, calendar and ghistory commands
tests: Add simple tests for burndown, calendar and ghistory commands
Python
mit
Spirotot/taskwiki,phha/taskwiki
python
## Code Before: from tests.base import IntegrationTest class TestBurndown(IntegrationTest): def execute(self): self.command("TaskWikiBurndownDaily") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.daily") assert "Daily Burndown" in self.read_buffer()[0] ## Instruction: tests: Add simple tests for burndown, calendar and ghistory commands ## Code After: import re from tests.base import IntegrationTest from tasklib.task import local_zone from datetime import datetime class TestBurndownDailySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownDaily") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.daily") assert "Daily Burndown" in self.read_buffer()[0] class TestBurndownMonthlySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownMonthly") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.monthly") assert "Monthly Burndown" in self.read_buffer()[0] class TestBurndownWeeklySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownWeekly") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.weekly") assert "Weekly Burndown" in self.read_buffer()[0] class TestCalendarSimple(IntegrationTest): def execute(self): self.command("TaskWikiCalendar") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer calendar") # Assert each day is displayed at least once. output = self.read_buffer() for day in map(str, range(1, 29)): assert any(day in line for line in output) class TestGhistorySimple(IntegrationTest): tasks = [ dict(description="test task"), dict(description="completed task 1", status="completed", end="now"), dict(description="completed task 2", status="completed", end="now"), dict(description="deleted task", status="deleted"), ] def execute(self): self.command("TaskWikiGhistoryAnnual") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer ghistory.annual") output = self.read_buffer() header_words = ("Year", "Number", "Added", "Completed", "Deleted") for word in header_words: assert word in output[0] legend_words = ("Legend", "Added", "Completed", "Deleted") for word in legend_words: assert re.search(word, output[-1], re.IGNORECASE) current_year = local_zone.localize(datetime.now()).year assert str(current_year) in '\n'.join(output)
... import re from tests.base import IntegrationTest from tasklib.task import local_zone from datetime import datetime class TestBurndownDailySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownDaily") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.daily") assert "Daily Burndown" in self.read_buffer()[0] class TestBurndownMonthlySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownMonthly") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.monthly") assert "Monthly Burndown" in self.read_buffer()[0] class TestBurndownWeeklySimple(IntegrationTest): def execute(self): self.command("TaskWikiBurndownWeekly") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.weekly") assert "Weekly Burndown" in self.read_buffer()[0] class TestCalendarSimple(IntegrationTest): def execute(self): self.command("TaskWikiCalendar") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer calendar") # Assert each day is displayed at least once. output = self.read_buffer() for day in map(str, range(1, 29)): assert any(day in line for line in output) class TestGhistorySimple(IntegrationTest): tasks = [ dict(description="test task"), dict(description="completed task 1", status="completed", end="now"), dict(description="completed task 2", status="completed", end="now"), dict(description="deleted task", status="deleted"), ] def execute(self): self.command("TaskWikiGhistoryAnnual") assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer ghistory.annual") output = self.read_buffer() header_words = ("Year", "Number", "Added", "Completed", "Deleted") for word in header_words: assert word in output[0] legend_words = ("Legend", "Added", "Completed", "Deleted") for word in legend_words: assert re.search(word, output[-1], re.IGNORECASE) current_year = local_zone.localize(datetime.now()).year assert str(current_year) in '\n'.join(output) ...
351bfe236f183c069314f5df7d3c4b8f9d8699b4
final/problem6.py
final/problem6.py
class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Person.say(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Lecturer.lecture(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
Modify lecture method in ArrogantProfessor class using inheritance
Modify lecture method in ArrogantProfessor class using inheritance
Python
mit
Kunal57/MIT_6.00.1x
python
## Code Before: class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Person.say(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue')) ## Instruction: Modify lecture method in ArrogantProfessor class using inheritance ## Code After: class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Lecturer.lecture(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
# ... existing code ... class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Lecturer.lecture(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) # ... rest of the code ...
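The fix above works because an explicit `ParentClass.method(self, ...)` call picks that exact implementation instead of the one normal attribute lookup would find. A minimal sketch with illustrative names, not taken from the exercise:

class Base(object):
    def greet(self):
        return "base"

class Middle(Base):
    def greet(self):
        # Prepends its own text, then defers explicitly to Base.
        return "middle + " + Base.greet(self)

class Leaf(Middle):
    def greet(self):
        # Calling Middle.greet picks up Middle's text as well;
        # calling Base.greet here instead would skip it.
        return "leaf + " + Middle.greet(self)

print(Leaf().greet())  # leaf + middle + base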
35325168839234efe98a927fda76548de553d666
test/test_pipeline.py
test/test_pipeline.py
from pype.lexer import lexer from pype.pipeline import Pipeline example_error_ppl='test/samples/example_error.ppl' example0_ppl='test/samples/example0.ppl' example0_token='test/samples/example0.tokens' example1_ppl='test/samples/example1.ppl' example1_token='test/samples/example1.tokens' def test_lexer(): lexer.input(open(example1_ppl).read()) output=open(example1_token) for token, line in zip(lexer, output): assert str(token) == line.strip() lexer.input(open(example_error_ppl).read()) for token in lexer: print (token) def test_example0(): t=Pipeline(example0_ppl) def test_example1(): t=Pipeline(example1_ppl)
from pytest import raises from pype.lexer import lexer from pype.pipeline import Pipeline example_error_ppl='test/samples/example_error.ppl' example0_ppl='test/samples/example0.ppl' example0_token='test/samples/example0.tokens' example1_ppl='test/samples/example1.ppl' example1_token='test/samples/example1.tokens' def test_lexer(): lexer.input(open(example1_ppl).read()) output=open(example1_token) for token, line in zip(lexer, output): assert str(token) == line.strip() lexer.input(open(example_error_ppl).read()) for token in lexer: print (token) def test_example0(): with raises(PypeSyntaxError): t=Pipeline(example0_ppl) def test_example1(): with raises(PypeSyntaxError): t=Pipeline(example1_ppl)
Raise pypeSyntaxError in pype test
Raise pypeSyntaxError in pype test
Python
mit
cs207-project/TimeSeries,cs207-project/TimeSeries,cs207-project/TimeSeries,cs207-project/TimeSeries
python
## Code Before: from pype.lexer import lexer from pype.pipeline import Pipeline example_error_ppl='test/samples/example_error.ppl' example0_ppl='test/samples/example0.ppl' example0_token='test/samples/example0.tokens' example1_ppl='test/samples/example1.ppl' example1_token='test/samples/example1.tokens' def test_lexer(): lexer.input(open(example1_ppl).read()) output=open(example1_token) for token, line in zip(lexer, output): assert str(token) == line.strip() lexer.input(open(example_error_ppl).read()) for token in lexer: print (token) def test_example0(): t=Pipeline(example0_ppl) def test_example1(): t=Pipeline(example1_ppl) ## Instruction: Raise pypeSyntaxError in pype test ## Code After: from pytest import raises from pype.lexer import lexer from pype.pipeline import Pipeline example_error_ppl='test/samples/example_error.ppl' example0_ppl='test/samples/example0.ppl' example0_token='test/samples/example0.tokens' example1_ppl='test/samples/example1.ppl' example1_token='test/samples/example1.tokens' def test_lexer(): lexer.input(open(example1_ppl).read()) output=open(example1_token) for token, line in zip(lexer, output): assert str(token) == line.strip() lexer.input(open(example_error_ppl).read()) for token in lexer: print (token) def test_example0(): with raises(PypeSyntaxError): t=Pipeline(example0_ppl) def test_example1(): with raises(PypeSyntaxError): t=Pipeline(example1_ppl)
// ... existing code ... from pytest import raises from pype.lexer import lexer from pype.pipeline import Pipeline // ... modified code ... print (token) def test_example0(): with raises(PypeSyntaxError): t=Pipeline(example0_ppl) def test_example1(): with raises(PypeSyntaxError): t=Pipeline(example1_ppl) // ... rest of the code ...
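The rewritten tests above rely on pytest's `raises` context manager. A self-contained sketch of that pattern with a builtin exception, so it runs without the pype package (the function below is made up for illustration):

import pytest

def parse_positive_int(text):
    value = int(text)  # raises ValueError for non-numeric input
    if value <= 0:
        raise ValueError("expected a positive integer")
    return value

def test_rejects_bad_input():
    # Passes only if the block raises the named exception.
    with pytest.raises(ValueError):
        parse_positive_int("not a number")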
aa82f91d220e8985c7f6dc68433ad65e70a71d15
froide/foirequest/tests/test_mail.py
froide/foirequest/tests/test_mail.py
from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(message[1].attachments), 1)
from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(messages[1].attachments), 1) self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx")
Test for attachment in mail test
Test for attachment in mail test
Python
mit
catcosmo/froide,okfse/froide,fin/froide,stefanw/froide,catcosmo/froide,fin/froide,LilithWittmann/froide,LilithWittmann/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,LilithWittmann/froide,catcosmo/froide,catcosmo/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,ryankanno/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,ryankanno/froide,stefanw/froide,okfse/froide,CodeforHawaii/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,fin/froide,stefanw/froide,catcosmo/froide,fin/froide
python
## Code Before: from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(message[1].attachments), 1) ## Instruction: Test for attachment in mail test ## Code After: from __future__ import with_statement from django.test import TestCase from foirequest.tasks import _process_mail from foirequest.models import FoiRequest class MailTest(TestCase): fixtures = ['publicbodies.json', "foirequest.json"] def test_working(self): with file("foirequest/tests/test_mail_01.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) def test_working_with_attachment(self): with file("foirequest/tests/test_mail_02.txt") as f: _process_mail(f.read()) request = FoiRequest.objects.get_by_secret_mail("[email protected]") messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(messages[1].attachments), 1) self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx")
# ... existing code ... messages = request.foimessage_set.all() self.assertEqual(len(messages), 2) self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data") self.assertEqual(len(messages[1].attachments), 1) self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx") # ... rest of the code ...
41dde73589308e628aeaed1a86fbea918123819b
src/test/java/com/pardot/rhombus/functional/CassandraConnectionITCase.java
src/test/java/com/pardot/rhombus/functional/CassandraConnectionITCase.java
package com.pardot.rhombus.functional; import static org.junit.Assert.*; import com.datastax.driver.core.exceptions.InvalidConfigurationInQueryException; import com.pardot.rhombus.helpers.TestHelpers; import org.junit.Test; import com.datastax.driver.core.Session; import com.pardot.rhombus.ConnectionManager; import java.io.IOException; public class CassandraConnectionITCase { @Test public void testKeyspaceCreate() throws IOException { ConnectionManager cm = new ConnectionManager(TestHelpers.getTestCassandraConfiguration()); cm.buildCluster(); Session session = cm.getEmptySession(); assertNotNull(session); //Drop the functional keyspace if it exists try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidConfigurationInQueryException e) { //Ignore } //Create the functional keyspace session.execute("CREATE KEYSPACE functional_create WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }"); //Change to our functional testing keyspace session.execute("USE functional_create"); //Drop the functional keyspace try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidConfigurationInQueryException e) { //Ignore } //Shutdown the session session.shutdown(); //Teardown the connection manager cm.teardown(); } }
package com.pardot.rhombus.functional; import static org.junit.Assert.*; import com.datastax.driver.core.exceptions.InvalidConfigurationInQueryException; import com.datastax.driver.core.exceptions.InvalidQueryException; import com.pardot.rhombus.helpers.TestHelpers; import org.junit.Test; import com.datastax.driver.core.Session; import com.pardot.rhombus.ConnectionManager; import java.io.IOException; public class CassandraConnectionITCase { @Test public void testKeyspaceCreate() throws IOException { ConnectionManager cm = new ConnectionManager(TestHelpers.getTestCassandraConfiguration()); cm.buildCluster(); Session session = cm.getEmptySession(); assertNotNull(session); //Drop the functional keyspace if it exists try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidQueryException e) { //Ignore } //Create the functional keyspace session.execute("CREATE KEYSPACE functional_create WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }"); //Change to our functional testing keyspace session.execute("USE functional_create"); //Drop the functional keyspace try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidQueryException e) { //Ignore } //Shutdown the session session.shutdown(); //Teardown the connection manager cm.teardown(); } }
Fix test case to work with newest version of datastax java driver
Fix test case to work with newest version of datastax java driver
Java
mit
Pardot/Rhombus,ybrs/Rhombus,ybrs/Rhombus,Pardot/Rhombus
java
## Code Before: package com.pardot.rhombus.functional; import static org.junit.Assert.*; import com.datastax.driver.core.exceptions.InvalidConfigurationInQueryException; import com.pardot.rhombus.helpers.TestHelpers; import org.junit.Test; import com.datastax.driver.core.Session; import com.pardot.rhombus.ConnectionManager; import java.io.IOException; public class CassandraConnectionITCase { @Test public void testKeyspaceCreate() throws IOException { ConnectionManager cm = new ConnectionManager(TestHelpers.getTestCassandraConfiguration()); cm.buildCluster(); Session session = cm.getEmptySession(); assertNotNull(session); //Drop the functional keyspace if it exists try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidConfigurationInQueryException e) { //Ignore } //Create the functional keyspace session.execute("CREATE KEYSPACE functional_create WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }"); //Change to our functional testing keyspace session.execute("USE functional_create"); //Drop the functional keyspace try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidConfigurationInQueryException e) { //Ignore } //Shutdown the session session.shutdown(); //Teardown the connection manager cm.teardown(); } } ## Instruction: Fix test case to work with newest version of datastax java driver ## Code After: package com.pardot.rhombus.functional; import static org.junit.Assert.*; import com.datastax.driver.core.exceptions.InvalidConfigurationInQueryException; import com.datastax.driver.core.exceptions.InvalidQueryException; import com.pardot.rhombus.helpers.TestHelpers; import org.junit.Test; import com.datastax.driver.core.Session; import com.pardot.rhombus.ConnectionManager; import java.io.IOException; public class CassandraConnectionITCase { @Test public void testKeyspaceCreate() throws IOException { ConnectionManager cm = new ConnectionManager(TestHelpers.getTestCassandraConfiguration()); cm.buildCluster(); Session session = cm.getEmptySession(); assertNotNull(session); //Drop the functional keyspace if it exists try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidQueryException e) { //Ignore } //Create the functional keyspace session.execute("CREATE KEYSPACE functional_create WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }"); //Change to our functional testing keyspace session.execute("USE functional_create"); //Drop the functional keyspace try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidQueryException e) { //Ignore } //Shutdown the session session.shutdown(); //Teardown the connection manager cm.teardown(); } }
// ... existing code ... import static org.junit.Assert.*; import com.datastax.driver.core.exceptions.InvalidConfigurationInQueryException; import com.datastax.driver.core.exceptions.InvalidQueryException; import com.pardot.rhombus.helpers.TestHelpers; import org.junit.Test; // ... modified code ... //Drop the functional keyspace if it exists try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidQueryException e) { //Ignore } ... //Drop the functional keyspace try { session.execute("DROP KEYSPACE functional_create"); } catch (InvalidQueryException e) { //Ignore } // ... rest of the code ...
48402464f8e1feb9b50c0c98003bc808a7c33ed9
card_match.py
card_match.py
import pyglet def draw_card(): pyglet.graphics.draw(4, pyglet.gl.GL_QUADS, ('v2i', (10, 15, 10, 35, 20, 35, 20, 15) ) ) window = pyglet.window.Window() label = pyglet.text.Label('Hello, world', font_name='Times New Roman', font_size=36, x=window.width // 2, y=window.height // 2, anchor_x='center', anchor_y='center') # Set up event handlers # We need to do this after declaring the variables the handlers use # but before we start running the app @window.event def on_draw(): window.clear() label.draw() draw_card() pyglet.app.run()
import pyglet card_vertices = [ 0, 0, 0, 1, 1, 1, 1, 0 ] def draw_card(window): pyglet.graphics.draw(4, pyglet.gl.GL_QUADS, ('v2i', (get_scaled_vertices(window)) ) ) def get_scale(window): return 100, 100 # Place holder def get_scaled_vertices(window): scale = get_scale(window) scaled_vertices = [] for i in range(0, len(card_vertices), 2): scaled_vertices.append(card_vertices[i] * scale[0]) scaled_vertices.append(card_vertices[i + 1] * scale[1]) return scaled_vertices window = pyglet.window.Window() label = pyglet.text.Label('Hello, world', font_name='Times New Roman', font_size=36, x=window.width // 2, y=window.height // 2, anchor_x='center', anchor_y='center') # Set up event handlers # We need to do this after declaring the variables the handlers use # but before we start running the app @window.event def on_draw(): window.clear() label.draw() draw_card(window) pyglet.app.run()
Add skeleton code for scaling card size
Add skeleton code for scaling card size
Python
mit
SingingTree/CardMatchPyglet
python
## Code Before: import pyglet def draw_card(): pyglet.graphics.draw(4, pyglet.gl.GL_QUADS, ('v2i', (10, 15, 10, 35, 20, 35, 20, 15) ) ) window = pyglet.window.Window() label = pyglet.text.Label('Hello, world', font_name='Times New Roman', font_size=36, x=window.width // 2, y=window.height // 2, anchor_x='center', anchor_y='center') # Set up event handlers # We need to do this after declaring the variables the handlers use # but before we start running the app @window.event def on_draw(): window.clear() label.draw() draw_card() pyglet.app.run() ## Instruction: Add skelton code for scaling card size ## Code After: import pyglet card_vertices = [ 0, 0, 0, 1, 1, 1, 1, 0 ] def draw_card(window): pyglet.graphics.draw(4, pyglet.gl.GL_QUADS, ('v2i', (get_scaled_vertices(window)) ) ) def get_scale(window): return 100, 100 # Place holder def get_scaled_vertices(window): scale = get_scale(window) scaled_vertices = [] for i in range(0, len(card_vertices), 2): scaled_vertices.append(card_vertices[i] * scale[0]) scaled_vertices.append(card_vertices[i + 1] * scale[1]) return scaled_vertices window = pyglet.window.Window() label = pyglet.text.Label('Hello, world', font_name='Times New Roman', font_size=36, x=window.width // 2, y=window.height // 2, anchor_x='center', anchor_y='center') # Set up event handlers # We need to do this after declaring the variables the handlers use # but before we start running the app @window.event def on_draw(): window.clear() label.draw() draw_card(window) pyglet.app.run()
# ... existing code ... import pyglet card_vertices = [ 0, 0, 0, 1, 1, 1, 1, 0 ] def draw_card(window): pyglet.graphics.draw(4, pyglet.gl.GL_QUADS, ('v2i', (get_scaled_vertices(window)) ) ) def get_scale(window): return 100, 100 # Place holder def get_scaled_vertices(window): scale = get_scale(window) scaled_vertices = [] for i in range(0, len(card_vertices), 2): scaled_vertices.append(card_vertices[i] * scale[0]) scaled_vertices.append(card_vertices[i + 1] * scale[1]) return scaled_vertices window = pyglet.window.Window() # ... modified code ... def on_draw(): window.clear() label.draw() draw_card(window) pyglet.app.run() # ... rest of the code ...
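Worked example of the vertex-scaling idea in the commit above, stripped of pyglet so it runs standalone (the scale values are illustrative, not the game's real card size):

card_vertices = [0, 0, 0, 1, 1, 1, 1, 0]  # unit square as flat x, y pairs
scale = (100, 150)  # hypothetical width and height in pixels

scaled = []
for i in range(0, len(card_vertices), 2):
    scaled.append(card_vertices[i] * scale[0])      # x component
    scaled.append(card_vertices[i + 1] * scale[1])  # y component

print(scaled)  # [0, 0, 0, 150, 100, 150, 100, 0]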
e5ca8b8e4cf9ef7a3c12c1713be52f671995c638
src/jvm/backtype/storm/topology/BasicBoltExecutor.java
src/jvm/backtype/storm/topology/BasicBoltExecutor.java
package backtype.storm.topology; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.tuple.Tuple; import java.util.Map; import org.apache.log4j.Logger; public class BasicBoltExecutor implements IRichBolt { public static Logger LOG = Logger.getLogger(BasicBoltExecutor.class); private IBasicBolt _bolt; private transient BasicOutputCollector _collector; public BasicBoltExecutor(IBasicBolt bolt) { _bolt = bolt; } public void declareOutputFields(OutputFieldsDeclarer declarer) { _bolt.declareOutputFields(declarer); } public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { _bolt.prepare(stormConf, context); _collector = new BasicOutputCollector(collector); } public void execute(Tuple input) { _collector.setContext(input); try { _bolt.execute(input, _collector); _collector.getOutputter().ack(input); } catch(FailedException e) { if(e instanceof ReportedFailedException) { _collector.reportError(e); } _collector.getOutputter().fail(input); } } public void cleanup() { _bolt.cleanup(); } public Map<String, Object> getComponentConfiguration() { return _bolt.getComponentConfiguration(); } }
package backtype.storm.topology; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.tuple.Tuple; import java.util.Map; import org.apache.log4j.Logger; public class BasicBoltExecutor implements IRichBolt { public static Logger LOG = Logger.getLogger(BasicBoltExecutor.class); private IBasicBolt _bolt; private transient BasicOutputCollector _collector; public BasicBoltExecutor(IBasicBolt bolt) { _bolt = bolt; } public void declareOutputFields(OutputFieldsDeclarer declarer) { _bolt.declareOutputFields(declarer); } public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { _bolt.prepare(stormConf, context); _collector = new BasicOutputCollector(collector); } public void execute(Tuple input) { _collector.setContext(input); try { _bolt.execute(input, _collector); _collector.getOutputter().ack(input); } catch(FailedException e) { LOG.warn("Failed to process tuple", e); _collector.getOutputter().fail(input); } } public void cleanup() { _bolt.cleanup(); } public Map<String, Object> getComponentConfiguration() { return _bolt.getComponentConfiguration(); } }
Revert "basic bolts check for reportedfailedexception"
Revert "basic bolts check for reportedfailedexception" This reverts commit e8d886485923b0f7a5101e55ded48103751f55a0.
Java
apache-2.0
hmcc/storm,chrismoulton/storm,kishorvpatil/incubator-storm,sakanaou/storm,srishtyagrawal/storm,chrismoulton/storm,ujfjhz/storm,ujfjhz/storm,taoguan/storm-1,erikdw/storm,Crim/storm,erikdw/storm,sakanaou/storm,ujfjhz/storm,hilfialkaff/storm,kevinconaway/storm,kamleshbhatt/storm,cluo512/storm,Crim/storm,F30/storm,Aloomaio/incubator-storm,Aloomaio/incubator-storm,Aloomaio/incubator-storm,jamesmarva/storm,allenjin/storm,srishtyagrawal/storm,kishorvpatil/incubator-storm,kevinconaway/storm,srdo/storm,hmcc/storm,0x726d77/storm,0x726d77/storm,F30/storm,praisondani/storm,Aloomaio/incubator-storm,allenjin/storm,pczb/storm,allenjin/storm,erikdw/storm,d2r/storm-marz,kevpeek/storm,revans2/incubator-storm,gongsm/storm,hmcc/storm,ferrero-zhang/storm,rahulkavale/storm,praisondani/storm,knusbaum/incubator-storm,ujfjhz/storm,srdo/storm,3manuek/storm-1,rahulkavale/storm,3manuek/storm-1,allenjin/storm,nathanmarz/storm,erikdw/storm,srishtyagrawal/storm,metamx/incubator-storm,revans2/storm,cluo512/storm,metamx/incubator-storm,hilfialkaff/storm,jamesmarva/storm,praisondani/storm,nathanmarz/storm,cluo512/storm,konfer/storm,pczb/storm,adityasharad/storm,srishtyagrawal/storm,d2r/storm-marz,elancom/storm,revans2/storm,roshannaik/storm,carl34/storm,raviperi/storm,kamleshbhatt/storm,rahulkavale/storm,0x726d77/storm,kamleshbhatt/storm,roshannaik/storm,jamesmarva/storm,hmcl/storm-apache,metamx/incubator-storm,kevpeek/storm,kevinconaway/storm,raviperi/storm,Crim/storm,carl34/storm,kishorvpatil/incubator-storm,F30/storm,roshannaik/storm,sakanaou/storm,revans2/storm,Crim/storm,d2r/storm-marz,srdo/storm,kevpeek/storm,cherryleer/storm,F30/storm,rahulkavale/storm,knusbaum/incubator-storm,metamx/incubator-storm,Aloomaio/incubator-storm,kamleshbhatt/storm,Frostman/storm,erikdw/storm,ferrero-zhang/storm,adityasharad/storm,mesosphere/storm,taoguan/storm-1,konfer/storm,kamleshbhatt/storm,adityasharad/storm,adityasharad/storm,srishtyagrawal/storm,roshannaik/storm,ferrero-zhang/storm,chrismoulton/storm,hmcl/storm-apache,elancom/storm,F30/storm,praisondani/storm,rahulkavale/storm,0x726d77/storm,carl34/storm,gongsm/storm,kevinconaway/storm,ujfjhz/storm,cherryleer/storm,raviperi/storm,kishorvpatil/incubator-storm,hmcc/storm,kevpeek/storm,raviperi/storm,srdo/storm,Frostman/storm,elancom/storm,konfer/storm,knusbaum/incubator-storm,lcp0578/storm,hmcl/storm-apache,raviperi/storm,srdo/storm,3manuek/storm-1,nathanmarz/storm,hmcc/storm,d2r/storm-marz,0x726d77/storm,mesosphere/storm,elancom/storm,kishorvpatil/incubator-storm,Aloomaio/incubator-storm,Frostman/storm,Frostman/storm,mesosphere/storm,taoguan/storm-1,kevpeek/storm,kevpeek/storm,adityasharad/storm,roshannaik/storm,knusbaum/incubator-storm,chrismoulton/storm,knusbaum/incubator-storm,gongsm/storm,adityasharad/storm,allenjin/storm,hmcc/storm,F30/storm,0x726d77/storm,praisondani/storm,mesosphere/storm,kevinconaway/storm,sakanaou/storm,pczb/storm,pczb/storm,jamesmarva/storm,knusbaum/incubator-storm,elancom/storm,srishtyagrawal/storm,Aloomaio/incubator-storm,cluo512/storm,ujfjhz/storm,pczb/storm,3manuek/storm-1,d2r/storm-marz,carl34/storm,gongsm/storm,ferrero-zhang/storm,ujfjhz/storm,hilfialkaff/storm,hmcl/storm-apache,lcp0578/storm,adityasharad/storm,roshannaik/storm,revans2/storm,kamleshbhatt/storm,carl34/storm,kevinconaway/storm,pczb/storm,F30/storm,revans2/incubator-storm,0x726d77/storm,kamleshbhatt/storm,taoguan/storm-1,revans2/storm,cluo512/storm,kishorvpatil/incubator-storm,Crim/storm,Crim/storm,mesosphere/storm,Frostman/storm,hilfialkaff/storm,revans2/incubator-storm,hmcc/storm,kevpe
ek/storm,hilfialkaff/storm,pczb/storm,3manuek/storm-1,Crim/storm,hmcl/storm-apache,hmcl/storm-apache,nathanmarz/storm,cherryleer/storm,lcp0578/storm,cluo512/storm,knusbaum/incubator-storm,revans2/incubator-storm,chrismoulton/storm,metamx/incubator-storm,hmcl/storm-apache,ferrero-zhang/storm,lcp0578/storm,roshannaik/storm,konfer/storm,raviperi/storm,carl34/storm,srdo/storm,srdo/storm,sakanaou/storm,lcp0578/storm,sakanaou/storm,cherryleer/storm,kishorvpatil/incubator-storm,mesosphere/storm,erikdw/storm,konfer/storm,carl34/storm,cherryleer/storm,cluo512/storm,sakanaou/storm,revans2/incubator-storm,erikdw/storm,kevinconaway/storm,raviperi/storm,gongsm/storm,jamesmarva/storm,nathanmarz/storm,srishtyagrawal/storm,taoguan/storm-1
java
## Code Before: package backtype.storm.topology; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.tuple.Tuple; import java.util.Map; import org.apache.log4j.Logger; public class BasicBoltExecutor implements IRichBolt { public static Logger LOG = Logger.getLogger(BasicBoltExecutor.class); private IBasicBolt _bolt; private transient BasicOutputCollector _collector; public BasicBoltExecutor(IBasicBolt bolt) { _bolt = bolt; } public void declareOutputFields(OutputFieldsDeclarer declarer) { _bolt.declareOutputFields(declarer); } public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { _bolt.prepare(stormConf, context); _collector = new BasicOutputCollector(collector); } public void execute(Tuple input) { _collector.setContext(input); try { _bolt.execute(input, _collector); _collector.getOutputter().ack(input); } catch(FailedException e) { if(e instanceof ReportedFailedException) { _collector.reportError(e); } _collector.getOutputter().fail(input); } } public void cleanup() { _bolt.cleanup(); } public Map<String, Object> getComponentConfiguration() { return _bolt.getComponentConfiguration(); } } ## Instruction: Revert "basic bolts check for reportedfailedexception" This reverts commit e8d886485923b0f7a5101e55ded48103751f55a0. ## Code After: package backtype.storm.topology; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.tuple.Tuple; import java.util.Map; import org.apache.log4j.Logger; public class BasicBoltExecutor implements IRichBolt { public static Logger LOG = Logger.getLogger(BasicBoltExecutor.class); private IBasicBolt _bolt; private transient BasicOutputCollector _collector; public BasicBoltExecutor(IBasicBolt bolt) { _bolt = bolt; } public void declareOutputFields(OutputFieldsDeclarer declarer) { _bolt.declareOutputFields(declarer); } public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { _bolt.prepare(stormConf, context); _collector = new BasicOutputCollector(collector); } public void execute(Tuple input) { _collector.setContext(input); try { _bolt.execute(input, _collector); _collector.getOutputter().ack(input); } catch(FailedException e) { LOG.warn("Failed to process tuple", e); _collector.getOutputter().fail(input); } } public void cleanup() { _bolt.cleanup(); } public Map<String, Object> getComponentConfiguration() { return _bolt.getComponentConfiguration(); } }
# ... existing code ... _bolt.execute(input, _collector); _collector.getOutputter().ack(input); } catch(FailedException e) { LOG.warn("Failed to process tuple", e); _collector.getOutputter().fail(input); } } # ... rest of the code ...
76c87d06efaac19350d870cd1c95229ed0a66c29
editdistance/__init__.py
editdistance/__init__.py
from .bycython import eval __all__ = ('eval',)
from .bycython import eval def distance(*args, **kwargs): """"An alias to eval""" return eval(*args, **kwargs) __all__ = ('eval', 'distance')
Add alias method named "distance"
Add alias method named "distance"
Python
mit
aflc/editdistance,aflc/editdistance,aflc/editdistance
python
## Code Before: from .bycython import eval __all__ = ('eval',) ## Instruction: Add alias method named "distance" ## Code After: from .bycython import eval def distance(*args, **kwargs): """"An alias to eval""" return eval(*args, **kwargs) __all__ = ('eval', 'distance')
... from .bycython import eval def distance(*args, **kwargs): """"An alias to eval""" return eval(*args, **kwargs) __all__ = ('eval', 'distance') ...
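Usage sketch for the alias added above, assuming the package (with its compiled `bycython` extension) is installed via `pip install editdistance`:

import editdistance

print(editdistance.eval("kitten", "sitting"))      # 3, the Levenshtein distance
print(editdistance.distance("kitten", "sitting"))  # 3, same result via the alias

One likely motivation for the alias is that `eval` shadows the name of a Python builtin, so `distance` reads more clearly at call sites.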
70808dfd53c5a5760a13252a72caf229793e8225
crawl.py
crawl.py
import urllib2; from bs4 import BeautifulSoup;
import urllib.parse; import urllib.request; from bs4 import BeautifulSoup; def searchLink(search): BASE_URL = "http://www.990.ro/" key = urllib.parse.urlencode({'kw': search}).encode('ascii'); re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key); re_link = urllib.request.urlopen(re); soup = BeautifulSoup(re_link.read(), "lxml"); ref = soup.find_all('a'); names = soup.find_all('div', id="rest"); if(ref != []): print("Search returned:") i = 1; for name in names: print(str(i) + ". " + name.get_text()); i+=1; select = int(input("\nPlease select the corresponding number: ")); return BASE_URL + ref[select - 1].get('href'); else: print("Nothing found!"); return ''; movie = input("search: "); print(searchLink(movie));
Add search method to find the movies/series home url
Add search method to find the movies/series home url
Python
mit
raztechs/py-video-crawler
python
## Code Before: import urllib2; from bs4 import BeautifulSoup; ## Instruction: Add search method to find the movies/series home url ## Code After: import urllib.parse; import urllib.request; from bs4 import BeautifulSoup; def searchLink(search): BASE_URL = "http://www.990.ro/" key = urllib.parse.urlencode({'kw': search}).encode('ascii'); re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key); re_link = urllib.request.urlopen(re); soup = BeautifulSoup(re_link.read(), "lxml"); ref = soup.find_all('a'); names = soup.find_all('div', id="rest"); if(ref != []): print("Search returned:") i = 1; for name in names: print(str(i) + ". " + name.get_text()); i+=1; select = int(input("\nPlease select the corresponding number: ")); return BASE_URL + ref[select - 1].get('href'); else: print("Nothing found!"); return ''; movie = input("search: "); print(searchLink(movie));
# ... existing code ... import urllib.parse; import urllib.request; from bs4 import BeautifulSoup; def searchLink(search): BASE_URL = "http://www.990.ro/" key = urllib.parse.urlencode({'kw': search}).encode('ascii'); re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key); re_link = urllib.request.urlopen(re); soup = BeautifulSoup(re_link.read(), "lxml"); ref = soup.find_all('a'); names = soup.find_all('div', id="rest"); if(ref != []): print("Search returned:") i = 1; for name in names: print(str(i) + ". " + name.get_text()); i+=1; select = int(input("\nPlease select the corresponding number: ")); return BASE_URL + ref[select - 1].get('href'); else: print("Nothing found!"); return ''; movie = input("search: "); print(searchLink(movie)); # ... rest of the code ...
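The search method above builds a form-encoded POST with the standard library. A minimal generic sketch of the same pattern against a placeholder endpoint (needs network access; httpbin.org stands in for the real site):

import urllib.parse
import urllib.request

data = urllib.parse.urlencode({"kw": "example"}).encode("ascii")
request = urllib.request.Request("https://httpbin.org/post", data)  # supplying data makes it a POST
with urllib.request.urlopen(request) as response:
    body = response.read().decode("utf-8")

print(body[:200])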
c769b66c546ad3fd9d04c0607506a49e9d3bff4a
fortdepend/preprocessor.py
fortdepend/preprocessor.py
import io import pcpp class FortranPreprocessor(pcpp.Preprocessor): def __init__(self): super().__init__() def parse_to_string_lines(self, text): with io.StringIO() as f: self.parse(text) self.write(f) f.seek(0) result = f.readlines() return result
import io import pcpp class FortranPreprocessor(pcpp.Preprocessor): def __init__(self): super(pcpp.Preprocessor, self).__init__() def parse_to_string_lines(self, text): with io.StringIO() as f: self.parse(text) self.write(f) f.seek(0) result = f.readlines() return result
Fix super() call for py2.7
Fix super() call for py2.7
Python
mit
ZedThree/fort_depend.py,ZedThree/fort_depend.py
python
## Code Before:
import io
import pcpp


class FortranPreprocessor(pcpp.Preprocessor):
    def __init__(self):
        super().__init__()

    def parse_to_string_lines(self, text):
        with io.StringIO() as f:
            self.parse(text)
            self.write(f)
            f.seek(0)
            result = f.readlines()
        return result

## Instruction:
Fix super() call for py2.7

## Code After:
import io
import pcpp


class FortranPreprocessor(pcpp.Preprocessor):
    def __init__(self):
        super(pcpp.Preprocessor, self).__init__()

    def parse_to_string_lines(self, text):
        with io.StringIO() as f:
            self.parse(text)
            self.write(f)
            f.seek(0)
            result = f.readlines()
        return result

...
class FortranPreprocessor(pcpp.Preprocessor):
    def __init__(self):
        super(pcpp.Preprocessor, self).__init__()

    def parse_to_string_lines(self, text):
        with io.StringIO() as f:
...
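As a quick illustration (not part of the record), the class above could be exercised roughly like this; the import path is assumed from the record's file name, and only the method defined in the record is called.

# Sketch only: run a small preprocessed snippet through FortranPreprocessor.
from fortdepend.preprocessor import FortranPreprocessor  # path assumed from fortdepend/preprocessor.py

pp = FortranPreprocessor()
source = "#define HAVE_MPI 1\n#ifdef HAVE_MPI\n   use mpi\n#endif\n"
lines = pp.parse_to_string_lines(source)
print("".join(lines))  # the `use mpi` line should survive because HAVE_MPI is defined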
1862317c3b463704c8264f71007e7b910772b44e
tests/test_pprint.py
tests/test_pprint.py
import pytest
from mappyfile.pprint import PrettyPrinter

def test_print_map():
    mf = {}
    pp = PrettyPrinter()
    # expected
    txt = pp.pprint(mf)
    assert(expected == txt)

def run_tests():
    #pytest.main(["tests/test_pprint.py::test_print_map"])
    pytest.main(["tests/test_pprint.py"])

if __name__ == "__main__":
    run_tests()

import pytest
from mappyfile.pprint import PrettyPrinter
import mappyfile

def test_format_list():
    s = """
    CLASS
        STYLE
            COLOR 173 216 230
        END
        STYLE
            OUTLINECOLOR 2 2 2
            WIDTH 1
            LINECAP BUTT
            PATTERN
                5 5
                10 10
            END
        END
    END
    """
    ast = mappyfile.loads(s)
    #print ast
    pp = PrettyPrinter(indent=0)

    # expected
    k = "pattern"
    lst = [[5, 5, 10, 10]]

    assert(pp.is_paired_list(k))
    r = pp.process_list(k, lst, 0)
    exp = [u'PATTERN', '5 5\n10 10', u'END']
    assert(r == exp)

def run_tests():
    pytest.main(["tests/test_pprint.py::test_format_list"])
    #pytest.main(["tests/test_pprint.py"])

if __name__ == "__main__":
    #run_tests()
    test_format_list()
Add pair list formatting test
Add pair list formatting test
Python
mit
Jenselme/mappyfile,geographika/mappyfile,geographika/mappyfile
python
## Code Before:
import pytest
from mappyfile.pprint import PrettyPrinter

def test_print_map():
    mf = {}
    pp = PrettyPrinter()
    # expected
    txt = pp.pprint(mf)
    assert(expected == txt)

def run_tests():
    #pytest.main(["tests/test_pprint.py::test_print_map"])
    pytest.main(["tests/test_pprint.py"])

if __name__ == "__main__":
    run_tests()

## Instruction:
Add pair list formatting test

## Code After:
import pytest
from mappyfile.pprint import PrettyPrinter
import mappyfile

def test_format_list():
    s = """
    CLASS
        STYLE
            COLOR 173 216 230
        END
        STYLE
            OUTLINECOLOR 2 2 2
            WIDTH 1
            LINECAP BUTT
            PATTERN
                5 5
                10 10
            END
        END
    END
    """
    ast = mappyfile.loads(s)
    #print ast
    pp = PrettyPrinter(indent=0)

    # expected
    k = "pattern"
    lst = [[5, 5, 10, 10]]

    assert(pp.is_paired_list(k))
    r = pp.process_list(k, lst, 0)
    exp = [u'PATTERN', '5 5\n10 10', u'END']
    assert(r == exp)

def run_tests():
    pytest.main(["tests/test_pprint.py::test_format_list"])
    #pytest.main(["tests/test_pprint.py"])

if __name__ == "__main__":
    #run_tests()
    test_format_list()

// ... existing code ...
import pytest
from mappyfile.pprint import PrettyPrinter
import mappyfile

def test_format_list():
    s = """
    CLASS
        STYLE
            COLOR 173 216 230
        END
        STYLE
            OUTLINECOLOR 2 2 2
            WIDTH 1
            LINECAP BUTT
            PATTERN
                5 5
                10 10
            END
        END
    END
    """
    ast = mappyfile.loads(s)
    #print ast
    pp = PrettyPrinter(indent=0)

    # expected
    k = "pattern"
    lst = [[5, 5, 10, 10]]

    assert(pp.is_paired_list(k))
    r = pp.process_list(k, lst, 0)
    exp = [u'PATTERN', '5 5\n10 10', u'END']
    assert(r == exp)

def run_tests():
    pytest.main(["tests/test_pprint.py::test_format_list"])
    #pytest.main(["tests/test_pprint.py"])

if __name__ == "__main__":
    #run_tests()
    test_format_list()
// ... rest of the code ...
1991dc4c60a338c2a5c3548684160e6ff9e858a2
examples/expl_google.py
examples/expl_google.py
import re

import mechanicalsoup

# Connect to Google
browser = mechanicalsoup.StatefulBrowser()
browser.open("https://www.google.com/")

# Fill-in the form
browser.select_form('form[action="/search"]')
browser["q"] = "MechanicalSoup"
browser.submit_selected(btnName="btnG")

# Display links
for link in browser.links():
    target = link.attrs['href']
    # Filter-out unrelated links and extract actual URL from Google's
    # click-tracking.
    if (target.startswith('/url?') and not
            target.startswith("/url?q=http://webcache.googleusercontent.com")):
        target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
        print(target)

import re

import mechanicalsoup

# Connect to Google
browser = mechanicalsoup.StatefulBrowser()
browser.open("https://www.google.com/")

# Fill-in the form
browser.select_form('form[action="/search"]')
browser["q"] = "MechanicalSoup"
# Note: the button name is btnK in the content served to actual
# browsers, but btnG for bots.
browser.submit_selected(btnName="btnG")

# Display links
for link in browser.links():
    target = link.attrs['href']
    # Filter-out unrelated links and extract actual URL from Google's
    # click-tracking.
    if (target.startswith('/url?') and not
            target.startswith("/url?q=http://webcache.googleusercontent.com")):
        target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
        print(target)
Add comment about button name on google example
Add comment about button name on google example
Python
mit
MechanicalSoup/MechanicalSoup,hemberger/MechanicalSoup,hickford/MechanicalSoup
python
## Code Before:
import re

import mechanicalsoup

# Connect to Google
browser = mechanicalsoup.StatefulBrowser()
browser.open("https://www.google.com/")

# Fill-in the form
browser.select_form('form[action="/search"]')
browser["q"] = "MechanicalSoup"
browser.submit_selected(btnName="btnG")

# Display links
for link in browser.links():
    target = link.attrs['href']
    # Filter-out unrelated links and extract actual URL from Google's
    # click-tracking.
    if (target.startswith('/url?') and not
            target.startswith("/url?q=http://webcache.googleusercontent.com")):
        target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
        print(target)

## Instruction:
Add comment about button name on google example

## Code After:
import re

import mechanicalsoup

# Connect to Google
browser = mechanicalsoup.StatefulBrowser()
browser.open("https://www.google.com/")

# Fill-in the form
browser.select_form('form[action="/search"]')
browser["q"] = "MechanicalSoup"
# Note: the button name is btnK in the content served to actual
# browsers, but btnG for bots.
browser.submit_selected(btnName="btnG")

# Display links
for link in browser.links():
    target = link.attrs['href']
    # Filter-out unrelated links and extract actual URL from Google's
    # click-tracking.
    if (target.startswith('/url?') and not
            target.startswith("/url?q=http://webcache.googleusercontent.com")):
        target = re.sub(r"^/url\?q=([^&]*)&.*", r"\1", target)
        print(target)

...
# Fill-in the form
browser.select_form('form[action="/search"]')
browser["q"] = "MechanicalSoup"
# Note: the button name is btnK in the content served to actual
# browsers, but btnG for bots.
browser.submit_selected(btnName="btnG")

# Display links
...
cd025e47c0580573f23bf9b6b8c6ff29d9db2264
src/test/java/info/u_team/u_team_test/init/TestEntityTypes.java
src/test/java/info/u_team/u_team_test/init/TestEntityTypes.java
package info.u_team.u_team_test.init;

import info.u_team.u_team_core.entitytype.UEntityType;
import info.u_team.u_team_core.util.registry.BaseRegistryUtil;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.entity.BetterEnderPearlEntity;
import net.minecraft.entity.*;
import net.minecraftforge.event.RegistryEvent.Register;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;

@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD)
public class TestEntityTypes {

    public static final EntityType<BetterEnderPearlEntity> BETTER_ENDERPEARL = UEntityType.UBuilder.<BetterEnderPearlEntity> create("better_enderpearl", BetterEnderPearlEntity::new, EntityClassification.MISC).size(0.25F, 0.25F).setTrackingRange(128).setUpdateInterval(20).setShouldReceiveVelocityUpdates(true).build();

    @SubscribeEvent
    public static void register(Register<EntityType<?>> event) {
        BaseRegistryUtil.getAllGenericRegistryEntriesAndApplyNames(TestMod.MODID, EntityType.class).forEach(event.getRegistry()::register);
    }
}

package info.u_team.u_team_test.init;

import info.u_team.u_team_core.entitytype.UEntityType;
import info.u_team.u_team_test.entity.BetterEnderPearlEntity;
import net.minecraft.entity.*;

public class TestEntityTypes {

    public static final EntityType<BetterEnderPearlEntity> BETTER_ENDERPEARL = UEntityType.UBuilder.<BetterEnderPearlEntity> create("better_enderpearl", BetterEnderPearlEntity::new, EntityClassification.MISC).size(0.25F, 0.25F).setTrackingRange(128).setUpdateInterval(20).setShouldReceiveVelocityUpdates(true).build();
}
Remove the old registry way from test entity types
Remove the old registry way from test entity types
Java
apache-2.0
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
java
## Code Before:
package info.u_team.u_team_test.init;

import info.u_team.u_team_core.entitytype.UEntityType;
import info.u_team.u_team_core.util.registry.BaseRegistryUtil;
import info.u_team.u_team_test.TestMod;
import info.u_team.u_team_test.entity.BetterEnderPearlEntity;
import net.minecraft.entity.*;
import net.minecraftforge.event.RegistryEvent.Register;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;

@EventBusSubscriber(modid = TestMod.MODID, bus = Bus.MOD)
public class TestEntityTypes {

    public static final EntityType<BetterEnderPearlEntity> BETTER_ENDERPEARL = UEntityType.UBuilder.<BetterEnderPearlEntity> create("better_enderpearl", BetterEnderPearlEntity::new, EntityClassification.MISC).size(0.25F, 0.25F).setTrackingRange(128).setUpdateInterval(20).setShouldReceiveVelocityUpdates(true).build();

    @SubscribeEvent
    public static void register(Register<EntityType<?>> event) {
        BaseRegistryUtil.getAllGenericRegistryEntriesAndApplyNames(TestMod.MODID, EntityType.class).forEach(event.getRegistry()::register);
    }
}

## Instruction:
Remove the old registry way from test entity types

## Code After:
package info.u_team.u_team_test.init;

import info.u_team.u_team_core.entitytype.UEntityType;
import info.u_team.u_team_test.entity.BetterEnderPearlEntity;
import net.minecraft.entity.*;

public class TestEntityTypes {

    public static final EntityType<BetterEnderPearlEntity> BETTER_ENDERPEARL = UEntityType.UBuilder.<BetterEnderPearlEntity> create("better_enderpearl", BetterEnderPearlEntity::new, EntityClassification.MISC).size(0.25F, 0.25F).setTrackingRange(128).setUpdateInterval(20).setShouldReceiveVelocityUpdates(true).build();
}

# ... existing code ...
package info.u_team.u_team_test.init;

import info.u_team.u_team_core.entitytype.UEntityType;
import info.u_team.u_team_test.entity.BetterEnderPearlEntity;
import net.minecraft.entity.*;

public class TestEntityTypes {

    public static final EntityType<BetterEnderPearlEntity> BETTER_ENDERPEARL = UEntityType.UBuilder.<BetterEnderPearlEntity> create("better_enderpearl", BetterEnderPearlEntity::new, EntityClassification.MISC).size(0.25F, 0.25F).setTrackingRange(128).setUpdateInterval(20).setShouldReceiveVelocityUpdates(true).build();
}
# ... rest of the code ...
51e7cd3bc5a9a56fb53a5b0a8328d0b9d58848dd
modder/utils/desktop_notification.py
modder/utils/desktop_notification.py
import platform

if platform.system() == 'Darwin':
    from Foundation import NSUserNotificationDefaultSoundName
    import objc

    NSUserNotification = objc.lookUpClass('NSUserNotification')
    NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')

    def desktop_notify(text, title='Modder', sound=False):
        notification = NSUserNotification.alloc().init()
        notification.setTitle_(title.decode('utf-8'))
        notification.setInformativeText_(text.decode('utf-8'))
        if sound:
            notification.setSoundName_(NSUserNotificationDefaultSoundName)

        center = NSUserNotificationCenter.defaultUserNotificationCenter()
        center.deliverNotification_(notification)

elif platform.system() == 'Windows':
    def desktop_notify(text, title='Modder', sound=False):
        pass

elif platform.system() == 'Linux':
    def desktop_notify(text, title='Modder', sound=False):
        pass

import platform

if platform.system() == 'Darwin':
    from Foundation import NSUserNotificationDefaultSoundName
    import objc

    NSUserNotification = objc.lookUpClass('NSUserNotification')
    NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')

    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        notification = NSUserNotification.alloc().init()
        notification.setTitle_(title.decode('utf-8'))
        notification.setInformativeText_(text.decode('utf-8'))
        if sound:
            notification.setSoundName_(NSUserNotificationDefaultSoundName)

        center = NSUserNotificationCenter.defaultUserNotificationCenter()
        center.deliverNotification_(notification)

elif platform.system() == 'Windows':
    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        pass

elif platform.system() == 'Linux':
    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        pass
Fix title for desktop notification
Fix title for desktop notification
Python
mit
JokerQyou/Modder2
python
## Code Before:
import platform

if platform.system() == 'Darwin':
    from Foundation import NSUserNotificationDefaultSoundName
    import objc

    NSUserNotification = objc.lookUpClass('NSUserNotification')
    NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')

    def desktop_notify(text, title='Modder', sound=False):
        notification = NSUserNotification.alloc().init()
        notification.setTitle_(title.decode('utf-8'))
        notification.setInformativeText_(text.decode('utf-8'))
        if sound:
            notification.setSoundName_(NSUserNotificationDefaultSoundName)

        center = NSUserNotificationCenter.defaultUserNotificationCenter()
        center.deliverNotification_(notification)

elif platform.system() == 'Windows':
    def desktop_notify(text, title='Modder', sound=False):
        pass

elif platform.system() == 'Linux':
    def desktop_notify(text, title='Modder', sound=False):
        pass

## Instruction:
Fix title for desktop notification

## Code After:
import platform

if platform.system() == 'Darwin':
    from Foundation import NSUserNotificationDefaultSoundName
    import objc

    NSUserNotification = objc.lookUpClass('NSUserNotification')
    NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')

    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        notification = NSUserNotification.alloc().init()
        notification.setTitle_(title.decode('utf-8'))
        notification.setInformativeText_(text.decode('utf-8'))
        if sound:
            notification.setSoundName_(NSUserNotificationDefaultSoundName)

        center = NSUserNotificationCenter.defaultUserNotificationCenter()
        center.deliverNotification_(notification)

elif platform.system() == 'Windows':
    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        pass

elif platform.system() == 'Linux':
    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        pass

...
    NSUserNotification = objc.lookUpClass('NSUserNotification')
    NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')

    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        notification = NSUserNotification.alloc().init()
        notification.setTitle_(title.decode('utf-8'))
        notification.setInformativeText_(text.decode('utf-8'))
...
        center.deliverNotification_(notification)

elif platform.system() == 'Windows':
    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        pass

elif platform.system() == 'Linux':
    def desktop_notify(text, title=None, sound=False):
        title = title or 'Modder'
        pass
...
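A brief, hypothetical call site for the helper above (Python 2 era code, macOS branch; the module path is taken from the record's file name):

# Illustrative only: how the fixed default title behaves.
from modder.utils.desktop_notification import desktop_notify

desktop_notify('Mod reloaded')                              # title falls back to 'Modder'
desktop_notify('Mod reloaded', title='MyMod', sound=True)   # explicit title, with sound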
04e243aafbd08008556d83d73fbbf22e5398aab4
telostats/stations/models.py
telostats/stations/models.py
from django.db import models
from django.utils import timezone


class Station(models.Model):
    id = models.IntegerField(unique=True, primary_key=True)
    name = models.CharField(u'name', max_length=100)
    longitude = models.FloatField(u'longitude')
    latitude = models.FloatField(u'latitude')


class Status(models.Model):
    station = models.ForeignKey(Station)
    timestamp = models.DateTimeField(default=timezone.now)
    actual_timestamp = models.DateTimeField(default=timezone.now)
    bikes = models.IntegerField(u'available bikes')
    docks = models.IntegerField(u'available docks')

from django.db import models
from django.utils import timezone


class Station(models.Model):
    id = models.IntegerField(unique=True, primary_key=True)
    name = models.CharField(u'name', max_length=100)
    longitude = models.FloatField(u'longitude')
    latitude = models.FloatField(u'latitude')

    def __unicode__(self):
        return self.name


class Status(models.Model):
    station = models.ForeignKey(Station)
    timestamp = models.DateTimeField(default=timezone.now)
    actual_timestamp = models.DateTimeField(default=timezone.now)
    bikes = models.IntegerField(u'available bikes')
    docks = models.IntegerField(u'available docks')

    def __unicode__(self):
        return u'{}: {}/{} ({})'.format(
            self.station, self.bikes, self.docks, self.timestamp)
Add unicode methods to Station/Status
Add unicode methods to Station/Status
Python
bsd-3-clause
idan/telostats,idan/telostats,idan/telostats
python
## Code Before:
from django.db import models
from django.utils import timezone


class Station(models.Model):
    id = models.IntegerField(unique=True, primary_key=True)
    name = models.CharField(u'name', max_length=100)
    longitude = models.FloatField(u'longitude')
    latitude = models.FloatField(u'latitude')


class Status(models.Model):
    station = models.ForeignKey(Station)
    timestamp = models.DateTimeField(default=timezone.now)
    actual_timestamp = models.DateTimeField(default=timezone.now)
    bikes = models.IntegerField(u'available bikes')
    docks = models.IntegerField(u'available docks')

## Instruction:
Add unicode methods to Station/Status

## Code After:
from django.db import models
from django.utils import timezone


class Station(models.Model):
    id = models.IntegerField(unique=True, primary_key=True)
    name = models.CharField(u'name', max_length=100)
    longitude = models.FloatField(u'longitude')
    latitude = models.FloatField(u'latitude')

    def __unicode__(self):
        return self.name


class Status(models.Model):
    station = models.ForeignKey(Station)
    timestamp = models.DateTimeField(default=timezone.now)
    actual_timestamp = models.DateTimeField(default=timezone.now)
    bikes = models.IntegerField(u'available bikes')
    docks = models.IntegerField(u'available docks')

    def __unicode__(self):
        return u'{}: {}/{} ({})'.format(
            self.station, self.bikes, self.docks, self.timestamp)

// ... existing code ...
    longitude = models.FloatField(u'longitude')
    latitude = models.FloatField(u'latitude')

    def __unicode__(self):
        return self.name


class Status(models.Model):
    station = models.ForeignKey(Station)
// ... modified code ...
    actual_timestamp = models.DateTimeField(default=timezone.now)
    bikes = models.IntegerField(u'available bikes')
    docks = models.IntegerField(u'available docks')

    def __unicode__(self):
        return u'{}: {}/{} ({})'.format(
            self.station, self.bikes, self.docks, self.timestamp)
// ... rest of the code ...
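For illustration only (assumes a configured Django settings module for the telostats project; Python 2 / Django 1.x era, matching the record), the new __unicode__ methods render like this:

# Hypothetical shell session against the models in the record.
from stations.models import Station, Status  # app path assumed from telostats/stations/models.py

station = Station(id=1, name=u'Rothschild Blvd', longitude=34.77, latitude=32.07)
status = Status(station=station, bikes=7, docks=13)
print(unicode(station))  # -> Rothschild Blvd
print(unicode(status))   # -> Rothschild Blvd: 7/13 (2012-10-01 ...)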