commit
stringlengths
40
40
old_file
stringlengths
4
234
new_file
stringlengths
4
234
old_contents
stringlengths
10
3.01k
new_contents
stringlengths
19
3.38k
subject
stringlengths
16
736
message
stringlengths
17
2.63k
lang
stringclasses
4 values
license
stringclasses
13 values
repos
stringlengths
5
82.6k
config
stringclasses
4 values
content
stringlengths
134
4.41k
fuzzy_diff
stringlengths
29
3.44k
884483d27f7c0fac3975da17a7ef5c470ef9e3b4
fcm_django/apps.py
fcm_django/apps.py
from django.apps import AppConfig from fcm_django.settings import FCM_DJANGO_SETTINGS as SETTINGS class FcmDjangoConfig(AppConfig): name = "fcm_django" verbose_name = SETTINGS["APP_VERBOSE_NAME"]
from django.apps import AppConfig from fcm_django.settings import FCM_DJANGO_SETTINGS as SETTINGS class FcmDjangoConfig(AppConfig): name = "fcm_django" verbose_name = SETTINGS["APP_VERBOSE_NAME"] default_auto_field = "django.db.models.BigAutoField"
Use BigAutoField as the default ID
Use BigAutoField as the default ID
Python
mit
xtrinch/fcm-django
python
## Code Before: from django.apps import AppConfig from fcm_django.settings import FCM_DJANGO_SETTINGS as SETTINGS class FcmDjangoConfig(AppConfig): name = "fcm_django" verbose_name = SETTINGS["APP_VERBOSE_NAME"] ## Instruction: Use BigAutoField as the default ID ## Code After: from django.apps import AppConfig from fcm_django.settings import FCM_DJANGO_SETTINGS as SETTINGS class FcmDjangoConfig(AppConfig): name = "fcm_django" verbose_name = SETTINGS["APP_VERBOSE_NAME"] default_auto_field = "django.db.models.BigAutoField"
# ... existing code ... class FcmDjangoConfig(AppConfig): name = "fcm_django" verbose_name = SETTINGS["APP_VERBOSE_NAME"] default_auto_field = "django.db.models.BigAutoField" # ... rest of the code ...
8c1b7f8a5a7403e464938aa0aa6876557ec6a2b3
daphne/server.py
daphne/server.py
import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000): self.channel_layer = channel_layer self.host = host self.port = port def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run() def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): self.channel_layer = channel_layer self.host = host self.port = port self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
Allow signal handlers to be disabled to run in subthread
Allow signal handlers to be disabled to run in subthread
Python
bsd-3-clause
django/daphne,maikhoepfel/daphne
python
## Code Before: import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000): self.channel_layer = channel_layer self.host = host self.port = port def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run() def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message) ## Instruction: Allow signal handlers to be disabled to run in subthread ## Code After: import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): self.channel_layer = channel_layer self.host = host self.port = port self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
# ... existing code ... class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): self.channel_layer = channel_layer self.host = host self.port = port self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ # ... rest of the code ...
032332855e3484eaac5fbf6374706947e140cfdd
clc-java-sdk/sdk/src/test/java/com/centurylink/cloud/sdk/servers/services/CreateServerAsyncTest.java
clc-java-sdk/sdk/src/test/java/com/centurylink/cloud/sdk/servers/services/CreateServerAsyncTest.java
package com.centurylink.cloud.sdk.servers.services; import com.centurylink.cloud.sdk.servers.AbstractServersSdkTest; import com.centurylink.cloud.sdk.servers.client.domain.server.metadata.ServerMetadata; import com.centurylink.cloud.sdk.servers.services.domain.Response; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import org.testng.annotations.AfterMethod; import org.testng.annotations.AfterTest; import org.testng.annotations.Test; import static com.centurylink.cloud.sdk.servers.services.TestServerSupport.anyServerConfig; /** * @author ilya.drabenia */ @Test(groups = "LongRunning") public class CreateServerAsyncTest extends AbstractServersSdkTest { @Inject ServerService serverService; Response<ServerMetadata> createServerResponse; @Test public void testCreateServerAsync() throws Exception { ListenableFuture<Response<ServerMetadata>> future = serverService.createAsync(anyServerConfig().name("CSAC")); createServerResponse = future.get(); assert createServerResponse.getResult().getId() != null; } @AfterMethod public void deleteTestServer() { createServerResponse .waitUntilComplete(); serverService .delete(createServerResponse.getResult().asRefById()) .waitUntilComplete(); } }
package com.centurylink.cloud.sdk.servers.services; import com.centurylink.cloud.sdk.servers.AbstractServersSdkTest; import com.centurylink.cloud.sdk.servers.client.domain.server.metadata.ServerMetadata; import com.centurylink.cloud.sdk.servers.services.domain.Response; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import org.testng.annotations.AfterMethod; import org.testng.annotations.AfterTest; import org.testng.annotations.Test; import static com.centurylink.cloud.sdk.servers.services.TestServerSupport.anyServerConfig; /** * @author ilya.drabenia */ @Test(groups = "LongRunning") public class CreateServerAsyncTest extends AbstractServersSdkTest { @Inject ServerService serverService; Response<ServerMetadata> createServerResponse; @Test public void testCreateServerAsync() throws Exception { ListenableFuture<Response<ServerMetadata>> future = serverService.createAsync(anyServerConfig().name("CSAC")); createServerResponse = future.get(); assert createServerResponse.getResult().getId() != null; } @AfterMethod public void deleteTestServer() { createServerResponse .waitUntilComplete(); serverService .delete(createServerResponse.getResult().asRefById()) .waitUntilComplete(); } }
Improve integration test tear down functionality
Improve integration test tear down functionality
Java
apache-2.0
CenturyLinkCloud/clc-java-sdk,CenturyLinkCloud/clc-java-sdk,CenturyLinkCloud/clc-java-sdk
java
## Code Before: package com.centurylink.cloud.sdk.servers.services; import com.centurylink.cloud.sdk.servers.AbstractServersSdkTest; import com.centurylink.cloud.sdk.servers.client.domain.server.metadata.ServerMetadata; import com.centurylink.cloud.sdk.servers.services.domain.Response; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import org.testng.annotations.AfterMethod; import org.testng.annotations.AfterTest; import org.testng.annotations.Test; import static com.centurylink.cloud.sdk.servers.services.TestServerSupport.anyServerConfig; /** * @author ilya.drabenia */ @Test(groups = "LongRunning") public class CreateServerAsyncTest extends AbstractServersSdkTest { @Inject ServerService serverService; Response<ServerMetadata> createServerResponse; @Test public void testCreateServerAsync() throws Exception { ListenableFuture<Response<ServerMetadata>> future = serverService.createAsync(anyServerConfig().name("CSAC")); createServerResponse = future.get(); assert createServerResponse.getResult().getId() != null; } @AfterMethod public void deleteTestServer() { createServerResponse .waitUntilComplete(); serverService .delete(createServerResponse.getResult().asRefById()) .waitUntilComplete(); } } ## Instruction: Improve integration test tear down functionality ## Code After: package com.centurylink.cloud.sdk.servers.services; import com.centurylink.cloud.sdk.servers.AbstractServersSdkTest; import com.centurylink.cloud.sdk.servers.client.domain.server.metadata.ServerMetadata; import com.centurylink.cloud.sdk.servers.services.domain.Response; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import org.testng.annotations.AfterMethod; import org.testng.annotations.AfterTest; import org.testng.annotations.Test; import static com.centurylink.cloud.sdk.servers.services.TestServerSupport.anyServerConfig; /** * @author ilya.drabenia */ @Test(groups = "LongRunning") public class CreateServerAsyncTest extends AbstractServersSdkTest { @Inject ServerService serverService; Response<ServerMetadata> createServerResponse; @Test public void testCreateServerAsync() throws Exception { ListenableFuture<Response<ServerMetadata>> future = serverService.createAsync(anyServerConfig().name("CSAC")); createServerResponse = future.get(); assert createServerResponse.getResult().getId() != null; } @AfterMethod public void deleteTestServer() { createServerResponse .waitUntilComplete(); serverService .delete(createServerResponse.getResult().asRefById()) .waitUntilComplete(); } }
// ... existing code ... @AfterMethod public void deleteTestServer() { createServerResponse .waitUntilComplete(); serverService .delete(createServerResponse.getResult().asRefById()) .waitUntilComplete(); } } // ... rest of the code ...
b8f948b58b06648c94fb746ae519a44a7e96ae15
tools/perf/perf_tools/kraken.py
tools/perf/perf_tools/kraken.py
from telemetry import multi_page_benchmark from telemetry import util def _Mean(l): return float(sum(l)) / len(l) if len(l) > 0 else 0.0 class Kraken(multi_page_benchmark.MultiPageBenchmark): def MeasurePage(self, _, tab, results): js_is_done = """ document.title.indexOf("Results") != -1 && document.readyState == "complete" """ def _IsDone(): return bool(tab.runtime.Evaluate(js_is_done)) util.WaitFor(_IsDone, 300) js_get_results = """ var formElement = document.getElementsByTagName("input")[0]; decodeURIComponent(formElement.value.split("?")[1]); """ result_dict = eval(tab.runtime.Evaluate(js_get_results)) total = 0 for key in result_dict: if key == 'v': continue results.Add(key, 'ms', result_dict[key], data_type='unimportant') total += _Mean(result_dict[key]) results.Add('Total', 'ms', total)
from telemetry import multi_page_benchmark from telemetry import util def _Mean(l): return float(sum(l)) / len(l) if len(l) > 0 else 0.0 class Kraken(multi_page_benchmark.MultiPageBenchmark): def MeasurePage(self, _, tab, results): js_is_done = """ document.title.indexOf("Results") != -1 && document.readyState == "complete" """ def _IsDone(): return bool(tab.runtime.Evaluate(js_is_done)) util.WaitFor(_IsDone, 500, poll_interval=5) js_get_results = """ var formElement = document.getElementsByTagName("input")[0]; decodeURIComponent(formElement.value.split("?")[1]); """ result_dict = eval(tab.runtime.Evaluate(js_get_results)) total = 0 for key in result_dict: if key == 'v': continue results.Add(key, 'ms', result_dict[key], data_type='unimportant') total += _Mean(result_dict[key]) results.Add('Total', 'ms', total)
Increase Kraken timeout to allow it to pass on Android.
[Telemetry] Increase Kraken timeout to allow it to pass on Android. BUG=163680 TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json Review URL: https://chromiumcodereview.appspot.com/11519015 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ltilve/chromium,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,jaruba/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,Pluto-tv/chromium-crosswalk,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,dushu1203/chromium.src,anirudhSK/chromium,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,hujiajie/pa-chromium,jaruba/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,hujiajie/pa-chromium,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,Jonekee/chromium.src,Chilledheart/chromium,zcbenz/cefode-chromium,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,anirudhSK/chromium,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,dushu1203/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,hgl888/chromium-crosswalk,patrickm/chromium.src,dednal/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,Jonekee/chromium.src,dednal/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,anirudhSK/chromium,zcbenz/cefode-chromium,Chilledheart/chromium,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,hujiajie/pa-chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,ltilve/chromium,jaruba/chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,ChromiumWebApps/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,markYoungH/chromium.src,littlstar/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,hujiajie/pa-chromium,hujiajie/pa-chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,dednal/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,zcbenz/cefode-chromium,markYoungH/chromium.src,dednal/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,markYoungH/chromium.src,dushu1203/chromium.src,nacl-webkit/chrome_deps,markYoungH/chromium.src,zcbenz/cefode-chromium,jaruba/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,dednal/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,hujiajie/pa-chromium,M4sse/chromium.src,ltilve/chromium,anirudhSK/chromium,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,ondra-novak/chromium.src,patrickm/chromium.src,timopulkkinen/BubbleFish,littlstar/chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,dushu1203/chromium.src,jaruba/chromium.src,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,axinging/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src
python
## Code Before: from telemetry import multi_page_benchmark from telemetry import util def _Mean(l): return float(sum(l)) / len(l) if len(l) > 0 else 0.0 class Kraken(multi_page_benchmark.MultiPageBenchmark): def MeasurePage(self, _, tab, results): js_is_done = """ document.title.indexOf("Results") != -1 && document.readyState == "complete" """ def _IsDone(): return bool(tab.runtime.Evaluate(js_is_done)) util.WaitFor(_IsDone, 300) js_get_results = """ var formElement = document.getElementsByTagName("input")[0]; decodeURIComponent(formElement.value.split("?")[1]); """ result_dict = eval(tab.runtime.Evaluate(js_get_results)) total = 0 for key in result_dict: if key == 'v': continue results.Add(key, 'ms', result_dict[key], data_type='unimportant') total += _Mean(result_dict[key]) results.Add('Total', 'ms', total) ## Instruction: [Telemetry] Increase Kraken timeout to allow it to pass on Android. BUG=163680 TEST=tools/perf/run_multipage_benchmarks --browser=android-content-shell kraken tools/perf/page_sets/kraken.json Review URL: https://chromiumcodereview.appspot.com/11519015 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@172374 0039d316-1c4b-4281-b951-d872f2087c98 ## Code After: from telemetry import multi_page_benchmark from telemetry import util def _Mean(l): return float(sum(l)) / len(l) if len(l) > 0 else 0.0 class Kraken(multi_page_benchmark.MultiPageBenchmark): def MeasurePage(self, _, tab, results): js_is_done = """ document.title.indexOf("Results") != -1 && document.readyState == "complete" """ def _IsDone(): return bool(tab.runtime.Evaluate(js_is_done)) util.WaitFor(_IsDone, 500, poll_interval=5) js_get_results = """ var formElement = document.getElementsByTagName("input")[0]; decodeURIComponent(formElement.value.split("?")[1]); """ result_dict = eval(tab.runtime.Evaluate(js_get_results)) total = 0 for key in result_dict: if key == 'v': continue results.Add(key, 'ms', result_dict[key], data_type='unimportant') total += _Mean(result_dict[key]) results.Add('Total', 'ms', total)
// ... existing code ... """ def _IsDone(): return bool(tab.runtime.Evaluate(js_is_done)) util.WaitFor(_IsDone, 500, poll_interval=5) js_get_results = """ var formElement = document.getElementsByTagName("input")[0]; // ... rest of the code ...
8caf479fd3998b24b0fe5e7c06666c6eff243643
tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/pages/ActionViaLinkDemo.java
tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/pages/ActionViaLinkDemo.java
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.integration.app1.pages; import org.apache.tapestry5.ComponentResources; import org.apache.tapestry5.Link; import org.apache.tapestry5.PersistenceConstants; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.ioc.annotations.Inject; public class ActionViaLinkDemo { @Persist(PersistenceConstants.FLASH) private String message; @Inject private ComponentResources resources; Object[] onPassivate() { return new Object[] { }; } public String getMessage() { return message; } void onUpdateMessage(String message) { getActionURL(); this.message = message; } public String getActionURL() { Link link = resources.createEventLink("UpdateMessage", false, "from getActionURL()"); return link.toURI(); } }
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.integration.app1.pages; import org.apache.tapestry5.ComponentResources; import org.apache.tapestry5.Link; import org.apache.tapestry5.PersistenceConstants; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.ioc.annotations.Inject; public class ActionViaLinkDemo { @Persist(PersistenceConstants.FLASH) private String message; @Inject private ComponentResources resources; Object[] onPassivate() { return new Object[]{}; } public String getMessage() { return message; } void onUpdateMessage(String message) { this.message = message; } public String getActionURL() { Link link = resources.createEventLink("UpdateMessage", "from getActionURL()"); return link.toURI(); } }
Make a correction to how the event link is created
Make a correction to how the event link is created
Java
apache-2.0
apache/tapestry-5,apache/tapestry-5,apache/tapestry-5,apache/tapestry-5,apache/tapestry-5
java
## Code Before: // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.integration.app1.pages; import org.apache.tapestry5.ComponentResources; import org.apache.tapestry5.Link; import org.apache.tapestry5.PersistenceConstants; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.ioc.annotations.Inject; public class ActionViaLinkDemo { @Persist(PersistenceConstants.FLASH) private String message; @Inject private ComponentResources resources; Object[] onPassivate() { return new Object[] { }; } public String getMessage() { return message; } void onUpdateMessage(String message) { getActionURL(); this.message = message; } public String getActionURL() { Link link = resources.createEventLink("UpdateMessage", false, "from getActionURL()"); return link.toURI(); } } ## Instruction: Make a correction to how the event link is created ## Code After: // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.apache.tapestry5.integration.app1.pages; import org.apache.tapestry5.ComponentResources; import org.apache.tapestry5.Link; import org.apache.tapestry5.PersistenceConstants; import org.apache.tapestry5.annotations.Persist; import org.apache.tapestry5.ioc.annotations.Inject; public class ActionViaLinkDemo { @Persist(PersistenceConstants.FLASH) private String message; @Inject private ComponentResources resources; Object[] onPassivate() { return new Object[]{}; } public String getMessage() { return message; } void onUpdateMessage(String message) { this.message = message; } public String getActionURL() { Link link = resources.createEventLink("UpdateMessage", "from getActionURL()"); return link.toURI(); } }
... Object[] onPassivate() { return new Object[]{}; } public String getMessage() ... void onUpdateMessage(String message) { this.message = message; } public String getActionURL() { Link link = resources.createEventLink("UpdateMessage", "from getActionURL()"); return link.toURI(); } ...
f2c9e4ef2fd88ddebec6d885eb6a9b40efdea4de
simple-examples/static.c
simple-examples/static.c
/* Static functions and static global variables are only visible in * the file that it is declared in. Static variables inside of a * function have a different meaning and is described below */ void foo() { int x = 0; static int staticx = 0; // initialized once; keeps value between invocations of foo(). x++; staticx++; printf("x=%d\n", x); printf("staticx=%d\n", staticx); } int main(void) { for(int i=0; i<10; i++) foo(); }
// Scott Kuhl #include <stdio.h> /* Static functions and static global variables are only visible in * the file that it is declared in. Static variables inside of a * function have a different meaning and is described below */ void foo() { int x = 0; static int staticx = 0; // initialized once; keeps value between invocations of foo(). x++; staticx++; printf("x=%d\n", x); printf("staticx=%d\n", staticx); } int main(void) { for(int i=0; i<10; i++) foo(); }
Add name to top of file.
Add name to top of file.
C
unlicense
skuhl/sys-prog-examples,skuhl/sys-prog-examples,skuhl/sys-prog-examples
c
## Code Before: /* Static functions and static global variables are only visible in * the file that it is declared in. Static variables inside of a * function have a different meaning and is described below */ void foo() { int x = 0; static int staticx = 0; // initialized once; keeps value between invocations of foo(). x++; staticx++; printf("x=%d\n", x); printf("staticx=%d\n", staticx); } int main(void) { for(int i=0; i<10; i++) foo(); } ## Instruction: Add name to top of file. ## Code After: // Scott Kuhl #include <stdio.h> /* Static functions and static global variables are only visible in * the file that it is declared in. Static variables inside of a * function have a different meaning and is described below */ void foo() { int x = 0; static int staticx = 0; // initialized once; keeps value between invocations of foo(). x++; staticx++; printf("x=%d\n", x); printf("staticx=%d\n", staticx); } int main(void) { for(int i=0; i<10; i++) foo(); }
// ... existing code ... // Scott Kuhl #include <stdio.h> /* Static functions and static global variables are only visible in * the file that it is declared in. Static variables inside of a // ... rest of the code ...
cad7093a3175868944acf1d2f62bad523e4f8a41
tests/unit/utils/test_thin.py
tests/unit/utils/test_thin.py
''' :codeauthor: :email:`Bo Maryniuk <[email protected]>` ''' from __future__ import absolute_import, print_function, unicode_literals import datetime from tests.support.unit import TestCase, skipIf from tests.support.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salt.ext.six.moves import zip from salt.ext import six import salt.utils.ssdp as ssdp import salt.utils.stringutils try: import pytest except ImportError: pytest = None @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') class SSHThinTestCase(TestCase): ''' TestCase for SaltSSH-related parts. ''' def test_get_tops(self): ''' Test thin.get_tops :return: '''
''' :codeauthor: :email:`Bo Maryniuk <[email protected]>` ''' from __future__ import absolute_import, print_function, unicode_literals import datetime from tests.support.unit import TestCase, skipIf from tests.support.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salt.ext.six.moves import zip from salt.ext import six from salt.utils import thin import salt.utils.stringutils try: import pytest except ImportError: pytest = None class SaltSyetemExitException(Exception): ''' System ''' def __init__(self): Exception.__init__(self, 'The Dilithium Crystals need to be rotated.') @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') class SSHThinTestCase(TestCase): ''' TestCase for SaltSSH-related parts. ''' @patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException)) @patch('salt.utils.thin.log', MagicMock()) def test_get_ext_tops_cfg_missing_dependencies(self): ''' Test thin.get_tops :return: ''' cfg = [ {'namespace': {'path': '/foo', 'dependencies': []}}, ] with pytest.raises(Exception) as err: thin.get_ext_tops(cfg) assert 'Dilithium Crystals' in str(err) assert thin.log.error.called assert 'Missing dependencies' in thin.log.error.call_args[0][0] assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
Add unit test for missing dependencies on get_ext_tops
Add unit test for missing dependencies on get_ext_tops
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
python
## Code Before: ''' :codeauthor: :email:`Bo Maryniuk <[email protected]>` ''' from __future__ import absolute_import, print_function, unicode_literals import datetime from tests.support.unit import TestCase, skipIf from tests.support.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salt.ext.six.moves import zip from salt.ext import six import salt.utils.ssdp as ssdp import salt.utils.stringutils try: import pytest except ImportError: pytest = None @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') class SSHThinTestCase(TestCase): ''' TestCase for SaltSSH-related parts. ''' def test_get_tops(self): ''' Test thin.get_tops :return: ''' ## Instruction: Add unit test for missing dependencies on get_ext_tops ## Code After: ''' :codeauthor: :email:`Bo Maryniuk <[email protected]>` ''' from __future__ import absolute_import, print_function, unicode_literals import datetime from tests.support.unit import TestCase, skipIf from tests.support.mock import ( NO_MOCK, NO_MOCK_REASON, MagicMock, patch) from salt.ext.six.moves import zip from salt.ext import six from salt.utils import thin import salt.utils.stringutils try: import pytest except ImportError: pytest = None class SaltSyetemExitException(Exception): ''' System ''' def __init__(self): Exception.__init__(self, 'The Dilithium Crystals need to be rotated.') @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') class SSHThinTestCase(TestCase): ''' TestCase for SaltSSH-related parts. ''' @patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException)) @patch('salt.utils.thin.log', MagicMock()) def test_get_ext_tops_cfg_missing_dependencies(self): ''' Test thin.get_tops :return: ''' cfg = [ {'namespace': {'path': '/foo', 'dependencies': []}}, ] with pytest.raises(Exception) as err: thin.get_ext_tops(cfg) assert 'Dilithium Crystals' in str(err) assert thin.log.error.called assert 'Missing dependencies' in thin.log.error.call_args[0][0] assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0]
... from salt.ext.six.moves import zip from salt.ext import six from salt.utils import thin import salt.utils.stringutils try: ... pytest = None class SaltSyetemExitException(Exception): ''' System ''' def __init__(self): Exception.__init__(self, 'The Dilithium Crystals need to be rotated.') @skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(pytest is None, 'PyTest is missing') class SSHThinTestCase(TestCase): ... ''' TestCase for SaltSSH-related parts. ''' @patch('salt.exceptions.SaltSystemExit', MagicMock(side_effect=SaltSyetemExitException)) @patch('salt.utils.thin.log', MagicMock()) def test_get_ext_tops_cfg_missing_dependencies(self): ''' Test thin.get_tops :return: ''' cfg = [ {'namespace': {'path': '/foo', 'dependencies': []}}, ] with pytest.raises(Exception) as err: thin.get_ext_tops(cfg) assert 'Dilithium Crystals' in str(err) assert thin.log.error.called assert 'Missing dependencies' in thin.log.error.call_args[0][0] assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0] ...
7eb57a623b4f915dac0cdf7fdd0eb74d77c51b5c
setup.py
setup.py
import sys from setuptools import setup install_requires = [] if sys.version_info < (2, 7): raise DeprecationWarning('Python 2.6 and older are no longer supported by PAY.JP. ') install_requires.append('requests >= 2.7.0') install_requires.append('six >= 1.9.0') setup( name="payjp", version="0.0.1", description='PAY.JP python bindings', author="PAY.JP", author_email='[email protected]', packages=['payjp', 'payjp.test'], url='https://github.com/payjp/payjp-python', install_requires=install_requires, test_suite='payjp.test.all', )
import sys from setuptools import setup install_requires = [] if sys.version_info < (2, 7): raise DeprecationWarning('Python 2.6 and older are no longer supported by PAY.JP. ') install_requires.append('requests >= 2.7.0') install_requires.append('six >= 1.9.0') setup( name="payjp", version="0.0.1", description='PAY.JP python bindings', author="PAY.JP", author_email='[email protected]', packages=['payjp', 'payjp.test'], url='https://github.com/payjp/payjp-python', install_requires=install_requires, tests_require=[ 'mock >= 1.3.0' ], test_suite='payjp.test.all', )
Add missing dependency required to run tests.
Add missing dependency required to run tests.
Python
mit
payjp/payjp-python
python
## Code Before: import sys from setuptools import setup install_requires = [] if sys.version_info < (2, 7): raise DeprecationWarning('Python 2.6 and older are no longer supported by PAY.JP. ') install_requires.append('requests >= 2.7.0') install_requires.append('six >= 1.9.0') setup( name="payjp", version="0.0.1", description='PAY.JP python bindings', author="PAY.JP", author_email='[email protected]', packages=['payjp', 'payjp.test'], url='https://github.com/payjp/payjp-python', install_requires=install_requires, test_suite='payjp.test.all', ) ## Instruction: Add missing dependency required to run tests. ## Code After: import sys from setuptools import setup install_requires = [] if sys.version_info < (2, 7): raise DeprecationWarning('Python 2.6 and older are no longer supported by PAY.JP. ') install_requires.append('requests >= 2.7.0') install_requires.append('six >= 1.9.0') setup( name="payjp", version="0.0.1", description='PAY.JP python bindings', author="PAY.JP", author_email='[email protected]', packages=['payjp', 'payjp.test'], url='https://github.com/payjp/payjp-python', install_requires=install_requires, tests_require=[ 'mock >= 1.3.0' ], test_suite='payjp.test.all', )
// ... existing code ... packages=['payjp', 'payjp.test'], url='https://github.com/payjp/payjp-python', install_requires=install_requires, tests_require=[ 'mock >= 1.3.0' ], test_suite='payjp.test.all', ) // ... rest of the code ...
666d9c999ebf0cc388d8f045a04756424c2d9b62
gdemo/util.py
gdemo/util.py
"""Share utility functions.""" from urllib import parse def get_route_value(environ, name): value = environ['wsgiorg.routing_args'][1][name] value = parse.unquote(value) return value.replace('%2F', '/')
"""Share utility functions.""" try: from urllib import parse except ImportError: import urllib as parse def get_route_value(environ, name): value = environ['wsgiorg.routing_args'][1][name] value = parse.unquote(value) return value.replace('%2F', '/')
Make it work for Python 2
Make it work for Python 2 Gabbi is designed to work with both Python 2.7 and 3.4.
Python
apache-2.0
cdent/gabbi-demo,cdent/gabbi-demo
python
## Code Before: """Share utility functions.""" from urllib import parse def get_route_value(environ, name): value = environ['wsgiorg.routing_args'][1][name] value = parse.unquote(value) return value.replace('%2F', '/') ## Instruction: Make it work for Python 2 Gabbi is designed to work with both Python 2.7 and 3.4. ## Code After: """Share utility functions.""" try: from urllib import parse except ImportError: import urllib as parse def get_route_value(environ, name): value = environ['wsgiorg.routing_args'][1][name] value = parse.unquote(value) return value.replace('%2F', '/')
# ... existing code ... """Share utility functions.""" try: from urllib import parse except ImportError: import urllib as parse def get_route_value(environ, name): # ... rest of the code ...
0dd5966c9a89fe6e59be22771be128221dab8dd8
src/java/com/threerings/whirled/spot/data/Cluster.java
src/java/com/threerings/whirled/spot/data/Cluster.java
// // $Id: Cluster.java,v 1.2 2003/02/13 23:01:35 mdb Exp $ package com.threerings.whirled.spot.data; import com.threerings.presents.dobj.DSet; /** * Contains information on clusters. */ public class Cluster implements DSet.Entry { /** A unique identifier for this cluster (also the distributed object * id of the cluster chat object). */ public int clusterOid; /** The x-coordinate of the cluster in the scene. */ public int x; /** The y-coordinate of the cluster in the scene. */ public int y; /** The number of occupants in this cluster. */ public int occupants; // documentation inherited public Comparable getKey () { if (_key == null) { _key = new Integer(clusterOid); } return _key; } // documentation inherited public boolean equals (Object other) { if (other instanceof Cluster) { return ((Cluster)other).clusterOid == clusterOid; } else { return false; } } // documentation inherited public int hashCode () { return clusterOid; } /** Used for {@link #geyKey}. */ protected transient Integer _key; }
// // $Id: Cluster.java,v 1.3 2003/03/25 03:16:11 mdb Exp $ package com.threerings.whirled.spot.data; import com.threerings.io.SimpleStreamableObject; import com.threerings.presents.dobj.DSet; /** * Contains information on clusters. */ public class Cluster extends SimpleStreamableObject implements DSet.Entry { /** A unique identifier for this cluster (also the distributed object * id of the cluster chat object). */ public int clusterOid; /** The x-coordinate of the cluster in the scene. */ public int x; /** The y-coordinate of the cluster in the scene. */ public int y; /** The number of occupants in this cluster. */ public int occupants; // documentation inherited public Comparable getKey () { if (_key == null) { _key = new Integer(clusterOid); } return _key; } // documentation inherited public boolean equals (Object other) { if (other instanceof Cluster) { return ((Cluster)other).clusterOid == clusterOid; } else { return false; } } // documentation inherited public int hashCode () { return clusterOid; } /** Used for {@link #geyKey}. */ protected transient Integer _key; }
Extend SimpleStreamableObject so that we get a useful default toString().
Extend SimpleStreamableObject so that we get a useful default toString(). git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@2321 542714f4-19e9-0310-aa3c-eee0fc999fb1
Java
lgpl-2.1
threerings/narya,threerings/narya,threerings/narya,threerings/narya,threerings/narya
java
## Code Before: // // $Id: Cluster.java,v 1.2 2003/02/13 23:01:35 mdb Exp $ package com.threerings.whirled.spot.data; import com.threerings.presents.dobj.DSet; /** * Contains information on clusters. */ public class Cluster implements DSet.Entry { /** A unique identifier for this cluster (also the distributed object * id of the cluster chat object). */ public int clusterOid; /** The x-coordinate of the cluster in the scene. */ public int x; /** The y-coordinate of the cluster in the scene. */ public int y; /** The number of occupants in this cluster. */ public int occupants; // documentation inherited public Comparable getKey () { if (_key == null) { _key = new Integer(clusterOid); } return _key; } // documentation inherited public boolean equals (Object other) { if (other instanceof Cluster) { return ((Cluster)other).clusterOid == clusterOid; } else { return false; } } // documentation inherited public int hashCode () { return clusterOid; } /** Used for {@link #geyKey}. */ protected transient Integer _key; } ## Instruction: Extend SimpleStreamableObject so that we get a useful default toString(). git-svn-id: a1a4b28b82a3276cc491891159dd9963a0a72fae@2321 542714f4-19e9-0310-aa3c-eee0fc999fb1 ## Code After: // // $Id: Cluster.java,v 1.3 2003/03/25 03:16:11 mdb Exp $ package com.threerings.whirled.spot.data; import com.threerings.io.SimpleStreamableObject; import com.threerings.presents.dobj.DSet; /** * Contains information on clusters. */ public class Cluster extends SimpleStreamableObject implements DSet.Entry { /** A unique identifier for this cluster (also the distributed object * id of the cluster chat object). */ public int clusterOid; /** The x-coordinate of the cluster in the scene. */ public int x; /** The y-coordinate of the cluster in the scene. */ public int y; /** The number of occupants in this cluster. */ public int occupants; // documentation inherited public Comparable getKey () { if (_key == null) { _key = new Integer(clusterOid); } return _key; } // documentation inherited public boolean equals (Object other) { if (other instanceof Cluster) { return ((Cluster)other).clusterOid == clusterOid; } else { return false; } } // documentation inherited public int hashCode () { return clusterOid; } /** Used for {@link #geyKey}. */ protected transient Integer _key; }
... // // $Id: Cluster.java,v 1.3 2003/03/25 03:16:11 mdb Exp $ package com.threerings.whirled.spot.data; import com.threerings.io.SimpleStreamableObject; import com.threerings.presents.dobj.DSet; /** * Contains information on clusters. */ public class Cluster extends SimpleStreamableObject implements DSet.Entry { /** A unique identifier for this cluster (also the distributed object ...
3327c204f34a725a2d070beb24a7a5a66d414930
migrations/versions/538eeb160af6_.py
migrations/versions/538eeb160af6_.py
# revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '1727fb4309d8' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
# revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '6b9d673d8e30' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
Update alembic order for merging
Update alembic order for merging
Python
apache-2.0
bunjiboys/security_monkey,stackArmor/security_monkey,markofu/security_monkey,bunjiboys/security_monkey,bunjiboys/security_monkey,markofu/security_monkey,markofu/security_monkey,Netflix/security_monkey,stackArmor/security_monkey,Netflix/security_monkey,Netflix/security_monkey,stackArmor/security_monkey,Netflix/security_monkey,bunjiboys/security_monkey,Netflix/security_monkey,bunjiboys/security_monkey,stackArmor/security_monkey,stackArmor/security_monkey,markofu/security_monkey,markofu/security_monkey
python
## Code Before: # revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '1727fb4309d8' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ### ## Instruction: Update alembic order for merging ## Code After: # revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '6b9d673d8e30' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('user', 'role') ### end Alembic commands ###
# ... existing code ... # revision identifiers, used by Alembic. revision = '538eeb160af6' down_revision = '6b9d673d8e30' from alembic import op import sqlalchemy as sa # ... rest of the code ...
40e02789773d9a5c289b960d73045dcaf88c6385
miru-plugin/src/test/java/com/jivesoftware/os/miru/plugin/query/TermAnalyzersTest.java
miru-plugin/src/test/java/com/jivesoftware/os/miru/plugin/query/TermAnalyzersTest.java
package com.jivesoftware.os.miru.plugin.query; import com.google.common.collect.Lists; import org.apache.lucene.analysis.Analyzer; import org.testng.annotations.Test; import java.util.List; import static org.testng.Assert.*; public class TermAnalyzersTest { @Test public void testFindAnalyzer() throws Exception { TermAnalyzers termAnalyzers = new TermAnalyzers(); List<String> localeList = Lists.newArrayList("en", "zh_cn", "foo_bar_bazz"); for (String locale : localeList) { Analyzer analyzer = termAnalyzers.findAnalyzer(locale, false); assertEquals("6.2.1", analyzer.getVersion().toString()); System.out.println(locale + " v" + analyzer.getVersion()); } } }
package com.jivesoftware.os.miru.plugin.query; import com.google.common.collect.Lists; import org.apache.lucene.analysis.Analyzer; import org.testng.annotations.Test; import static org.testng.Assert.assertEquals; public class TermAnalyzersTest { @Test public void testFindAnalyzer() throws Exception { TermAnalyzers termAnalyzers = new TermAnalyzers(); for (String locale : Lists.newArrayList("en", "zh_cn", "foo_bar_bazz")) { Analyzer analyzer = termAnalyzers.findAnalyzer(locale, false); assertEquals("6.2.1", analyzer.getVersion().toString()); } } }
Clean up analyzer unit test
Clean up analyzer unit test
Java
apache-2.0
jivesoftware/miru,jivesoftware/miru,jivesoftware/miru,jivesoftware/miru
java
## Code Before: package com.jivesoftware.os.miru.plugin.query; import com.google.common.collect.Lists; import org.apache.lucene.analysis.Analyzer; import org.testng.annotations.Test; import java.util.List; import static org.testng.Assert.*; public class TermAnalyzersTest { @Test public void testFindAnalyzer() throws Exception { TermAnalyzers termAnalyzers = new TermAnalyzers(); List<String> localeList = Lists.newArrayList("en", "zh_cn", "foo_bar_bazz"); for (String locale : localeList) { Analyzer analyzer = termAnalyzers.findAnalyzer(locale, false); assertEquals("6.2.1", analyzer.getVersion().toString()); System.out.println(locale + " v" + analyzer.getVersion()); } } } ## Instruction: Clean up analyzer unit test ## Code After: package com.jivesoftware.os.miru.plugin.query; import com.google.common.collect.Lists; import org.apache.lucene.analysis.Analyzer; import org.testng.annotations.Test; import static org.testng.Assert.assertEquals; public class TermAnalyzersTest { @Test public void testFindAnalyzer() throws Exception { TermAnalyzers termAnalyzers = new TermAnalyzers(); for (String locale : Lists.newArrayList("en", "zh_cn", "foo_bar_bazz")) { Analyzer analyzer = termAnalyzers.findAnalyzer(locale, false); assertEquals("6.2.1", analyzer.getVersion().toString()); } } }
# ... existing code ... import org.apache.lucene.analysis.Analyzer; import org.testng.annotations.Test; import static org.testng.Assert.assertEquals; public class TermAnalyzersTest { @Test public void testFindAnalyzer() throws Exception { TermAnalyzers termAnalyzers = new TermAnalyzers(); for (String locale : Lists.newArrayList("en", "zh_cn", "foo_bar_bazz")) { Analyzer analyzer = termAnalyzers.findAnalyzer(locale, false); assertEquals("6.2.1", analyzer.getVersion().toString()); } } # ... rest of the code ...
cc3f475345a6a0885eea7bc7ba41ebabd2821488
src/damis/models.py
src/damis/models.py
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DatetimeField(auto_now=True) created = models.DatetimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DatetimeField(auto_now=True) created = models.DatetimeField(auto_now_add=True) class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField() file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DatetimeField(auto_now=True) created = models.DatetimeField(auto_now_add=True)
from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
Add dataset upload_to attribute. Fix DateTimeField name.
Add dataset upload_to attribute. Fix DateTimeField name.
Python
agpl-3.0
InScience/DAMIS-old,InScience/DAMIS-old
python
## Code Before: from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DatetimeField(auto_now=True) created = models.DatetimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DatetimeField(auto_now=True) created = models.DatetimeField(auto_now_add=True) class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField() file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DatetimeField(auto_now=True) created = models.DatetimeField(auto_now_add=True) ## Instruction: Add dataset upload_to attribute. Fix DateTimeField name. ## Code After: from django.db import models from django.contrib.auth.models import User class DatasetLicence(models.Model): title = models.CharField(max_length=255) short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True)
# ... existing code ... short_title = models.CharField(max_length=30) url = models.URLField() summary = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) class FileFormat(models.Model): extension = models.CharField(max_length=10) description = models.TextField() updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) def get_dataset_upload_path(self, instance, filename): return '/%s/' % instance.author.username class Dataset(models.Model): title = models.CharField(max_length=255) licence = models.ForeignKey('DatasetLicence') file = models.FileField(upload_to=get_dataset_upload_path) file_format = models.ForeignKey('FileFormat') description = models.TextField() author = models.ForeignKey(User) updated = models.DateTimeField(auto_now=True) created = models.DateTimeField(auto_now_add=True) # ... rest of the code ...
a35b6e46bd9d443f07391f37f5e0e384e37608bb
nbgrader/tests/test_nbgrader_feedback.py
nbgrader/tests/test_nbgrader_feedback.py
from .base import TestBase from nbgrader.api import Gradebook import os class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("Problem Set 1") gb.add_student("foo") gb.add_student("bar") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() self._run_command( 'nbgrader autograde submitted-unchanged.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) self._run_command( 'nbgrader feedback submitted-unchanged.nbconvert.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
from .base import TestBase from nbgrader.api import Gradebook import os import shutil class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("ps1") gb.add_student("foo") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() os.makedirs('source/ps1') shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb') self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath)) os.makedirs('submitted/foo/ps1') shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb') self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath)) self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath)) assert os.path.exists('feedback/foo/ps1/p1.html')
Update tests for nbgrader feedback
Update tests for nbgrader feedback
Python
bsd-3-clause
jhamrick/nbgrader,alope107/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,alope107/nbgrader,jdfreder/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader
python
## Code Before: from .base import TestBase from nbgrader.api import Gradebook import os class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("Problem Set 1") gb.add_student("foo") gb.add_student("bar") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() self._run_command( 'nbgrader autograde submitted-unchanged.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) self._run_command( 'nbgrader feedback submitted-unchanged.nbconvert.ipynb ' '--db="{}" ' '--assignment="Problem Set 1" ' '--AssignmentExporter.notebook_id=teacher ' '--student=foo'.format(dbpath)) assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html') ## Instruction: Update tests for nbgrader feedback ## Code After: from .base import TestBase from nbgrader.api import Gradebook import os import shutil class TestNbgraderFeedback(TestBase): def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("ps1") gb.add_student("foo") return dbpath def test_help(self): """Does the help display without error?""" with self._temp_cwd(): self._run_command("nbgrader feedback --help-all") def test_single_file(self): """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() os.makedirs('source/ps1') shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb') self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath)) os.makedirs('submitted/foo/ps1') shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb') self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath)) self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath)) assert os.path.exists('feedback/foo/ps1/p1.html')
... from nbgrader.api import Gradebook import os import shutil class TestNbgraderFeedback(TestBase): ... def _setup_db(self): dbpath = self._init_db() gb = Gradebook(dbpath) gb.add_assignment("ps1") gb.add_student("foo") return dbpath def test_help(self): ... """Can feedback be generated for an unchanged assignment?""" with self._temp_cwd(["files/submitted-unchanged.ipynb"]): dbpath = self._setup_db() os.makedirs('source/ps1') shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb') self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath)) os.makedirs('submitted/foo/ps1') shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb') self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath)) self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath)) assert os.path.exists('feedback/foo/ps1/p1.html') ...
5093b83948d9ba809661ad311b6893ff2d17f23d
link/src/test/java/jamex/link/MessageCollector.java
link/src/test/java/jamex/link/MessageCollector.java
package jamex.link; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import javax.jms.Message; import javax.jms.MessageListener; import static org.junit.Assert.*; final class MessageCollector implements MessageListener { private static final int MAX_MESSAGE_COUNT = 10; private final LinkedBlockingQueue<Message> m_messages = new LinkedBlockingQueue<Message>( MAX_MESSAGE_COUNT ); @Override public void onMessage( final Message message ) { m_messages.add( message ); } Collection<Message> expectMessageCount( final int expectedMessageCount, final long maxWait ) throws InterruptedException { final LinkedList<Message> results = new LinkedList<Message>(); final long start = System.currentTimeMillis(); long now; while( results.size() < expectedMessageCount && ( ( now = System.currentTimeMillis() ) < start + maxWait ) ) { final long waitTime = Math.max( 1, start + maxWait - now ); final Message message = m_messages.poll( waitTime, TimeUnit.MILLISECONDS ); results.add( message ); } assertEquals( "Expected message count", expectedMessageCount, results.size() ); return results; } }
package jamex.link; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import javax.jms.Message; import javax.jms.MessageListener; import static org.junit.Assert.*; final class MessageCollector implements MessageListener { private static final int MAX_MESSAGE_COUNT = 10; private static final long DEFAULT_WAIT = 100L; private final LinkedBlockingQueue<Message> m_messages = new LinkedBlockingQueue<Message>( MAX_MESSAGE_COUNT ); @Override public void onMessage( final Message message ) { m_messages.add( message ); } Collection<Message> expectMessageCount( final int expectedMessageCount ) throws InterruptedException { return expectMessageCount( expectedMessageCount, DEFAULT_WAIT ); } Collection<Message> expectMessageCount( final int expectedMessageCount, final long maxWait ) throws InterruptedException { final LinkedList<Message> results = new LinkedList<Message>(); final long start = System.currentTimeMillis(); long now; while( results.size() < expectedMessageCount && ( ( now = System.currentTimeMillis() ) < start + maxWait ) ) { final long waitTime = Math.max( 1, start + maxWait - now ); final Message message = m_messages.poll( waitTime, TimeUnit.MILLISECONDS ); results.add( message ); } assertEquals( "Expected message count", expectedMessageCount, results.size() ); return results; } }
Update so collector has a default wait if unspecified
Update so collector has a default wait if unspecified
Java
apache-2.0
realityforge/jml,realityforge/jml
java
## Code Before: package jamex.link; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import javax.jms.Message; import javax.jms.MessageListener; import static org.junit.Assert.*; final class MessageCollector implements MessageListener { private static final int MAX_MESSAGE_COUNT = 10; private final LinkedBlockingQueue<Message> m_messages = new LinkedBlockingQueue<Message>( MAX_MESSAGE_COUNT ); @Override public void onMessage( final Message message ) { m_messages.add( message ); } Collection<Message> expectMessageCount( final int expectedMessageCount, final long maxWait ) throws InterruptedException { final LinkedList<Message> results = new LinkedList<Message>(); final long start = System.currentTimeMillis(); long now; while( results.size() < expectedMessageCount && ( ( now = System.currentTimeMillis() ) < start + maxWait ) ) { final long waitTime = Math.max( 1, start + maxWait - now ); final Message message = m_messages.poll( waitTime, TimeUnit.MILLISECONDS ); results.add( message ); } assertEquals( "Expected message count", expectedMessageCount, results.size() ); return results; } } ## Instruction: Update so collector has a default wait if unspecified ## Code After: package jamex.link; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import javax.jms.Message; import javax.jms.MessageListener; import static org.junit.Assert.*; final class MessageCollector implements MessageListener { private static final int MAX_MESSAGE_COUNT = 10; private static final long DEFAULT_WAIT = 100L; private final LinkedBlockingQueue<Message> m_messages = new LinkedBlockingQueue<Message>( MAX_MESSAGE_COUNT ); @Override public void onMessage( final Message message ) { m_messages.add( message ); } Collection<Message> expectMessageCount( final int expectedMessageCount ) throws InterruptedException { return expectMessageCount( expectedMessageCount, DEFAULT_WAIT ); } Collection<Message> expectMessageCount( final int expectedMessageCount, final long maxWait ) throws InterruptedException { final LinkedList<Message> results = new LinkedList<Message>(); final long start = System.currentTimeMillis(); long now; while( results.size() < expectedMessageCount && ( ( now = System.currentTimeMillis() ) < start + maxWait ) ) { final long waitTime = Math.max( 1, start + maxWait - now ); final Message message = m_messages.poll( waitTime, TimeUnit.MILLISECONDS ); results.add( message ); } assertEquals( "Expected message count", expectedMessageCount, results.size() ); return results; } }
... implements MessageListener { private static final int MAX_MESSAGE_COUNT = 10; private static final long DEFAULT_WAIT = 100L; private final LinkedBlockingQueue<Message> m_messages = new LinkedBlockingQueue<Message>( MAX_MESSAGE_COUNT ); ... m_messages.add( message ); } Collection<Message> expectMessageCount( final int expectedMessageCount ) throws InterruptedException { return expectMessageCount( expectedMessageCount, DEFAULT_WAIT ); } Collection<Message> expectMessageCount( final int expectedMessageCount, final long maxWait ) throws InterruptedException { final LinkedList<Message> results = new LinkedList<Message>(); ...
e36d31edd6020989bdb39f82dea54ef0e747e994
test/Driver/systemz-march.c
test/Driver/systemz-march.c
// Check that -march works for all supported targets. // RUN: not %clang -target s390x -S -emit-llvm -march=z9 %s -o - 2>&1 | FileCheck --check-prefix=CHECK-Z9 %s // RUN: %clang -target s390x -S -emit-llvm -march=z10 %s // RUN: %clang -target s390x -S -emit-llvm -march=z196 %s // RUN: %clang -target s390x -S -emit-llvm -march=zEC12 %s // CHECK-Z9: error: unknown target CPU 'z9' // CHECK-Z10: "-target-cpu" "z10" // CHECK-Z196: "-target-cpu" "z196" // CHECK-ZEC12: "-target-cpu" "zEC12" int x;
// Check that -march works for all supported targets. // RUN: not %clang -target s390x -S -emit-llvm -march=z9 %s -o - 2>&1 | FileCheck --check-prefix=CHECK-Z9 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=z10 %s 2>&1 | FileCheck --check-prefix=CHECK-Z10 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=z196 %s 2>&1 | FileCheck --check-prefix=CHECK-Z196 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=zEC12 %s 2>&1 | FileCheck --check-prefix=CHECK-ZEC12 %s // CHECK-Z9: error: unknown target CPU 'z9' // CHECK-Z10: "-target-cpu" "z10" // CHECK-Z196: "-target-cpu" "z196" // CHECK-ZEC12: "-target-cpu" "zEC12" int x;
Fix test to actually check things.
Fix test to actually check things. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@186701 91177308-0d34-0410-b5e6-96231b3b80d8
C
apache-2.0
llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang
c
## Code Before: // Check that -march works for all supported targets. // RUN: not %clang -target s390x -S -emit-llvm -march=z9 %s -o - 2>&1 | FileCheck --check-prefix=CHECK-Z9 %s // RUN: %clang -target s390x -S -emit-llvm -march=z10 %s // RUN: %clang -target s390x -S -emit-llvm -march=z196 %s // RUN: %clang -target s390x -S -emit-llvm -march=zEC12 %s // CHECK-Z9: error: unknown target CPU 'z9' // CHECK-Z10: "-target-cpu" "z10" // CHECK-Z196: "-target-cpu" "z196" // CHECK-ZEC12: "-target-cpu" "zEC12" int x; ## Instruction: Fix test to actually check things. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@186701 91177308-0d34-0410-b5e6-96231b3b80d8 ## Code After: // Check that -march works for all supported targets. // RUN: not %clang -target s390x -S -emit-llvm -march=z9 %s -o - 2>&1 | FileCheck --check-prefix=CHECK-Z9 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=z10 %s 2>&1 | FileCheck --check-prefix=CHECK-Z10 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=z196 %s 2>&1 | FileCheck --check-prefix=CHECK-Z196 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=zEC12 %s 2>&1 | FileCheck --check-prefix=CHECK-ZEC12 %s // CHECK-Z9: error: unknown target CPU 'z9' // CHECK-Z10: "-target-cpu" "z10" // CHECK-Z196: "-target-cpu" "z196" // CHECK-ZEC12: "-target-cpu" "zEC12" int x;
# ... existing code ... // Check that -march works for all supported targets. // RUN: not %clang -target s390x -S -emit-llvm -march=z9 %s -o - 2>&1 | FileCheck --check-prefix=CHECK-Z9 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=z10 %s 2>&1 | FileCheck --check-prefix=CHECK-Z10 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=z196 %s 2>&1 | FileCheck --check-prefix=CHECK-Z196 %s // RUN: %clang -target s390x -### -S -emit-llvm -march=zEC12 %s 2>&1 | FileCheck --check-prefix=CHECK-ZEC12 %s // CHECK-Z9: error: unknown target CPU 'z9' // CHECK-Z10: "-target-cpu" "z10" # ... rest of the code ...
602c01caa23df0c6dad5963412a340087012f692
thinc/tests/integration/test_shape_check.py
thinc/tests/integration/test_shape_check.py
import pytest import numpy from ...neural._classes.model import Model def test_mismatched_shapes_raise_ShapeError(): X = numpy.ones((3, 4)) model = Model(10, 5) with pytest.raises(ValueError): y = model.begin_training(X)
import pytest import numpy from ...neural._classes.model import Model from ...exceptions import UndefinedOperatorError, DifferentLengthError from ...exceptions import ExpectedTypeError, ShapeMismatchError def test_mismatched_shapes_raise_ShapeError(): X = numpy.ones((3, 4)) model = Model(10, 5) with pytest.raises(ShapeMismatchError): y = model.begin_training(X, X)
Update test and import errors
Update test and import errors
Python
mit
explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc
python
## Code Before: import pytest import numpy from ...neural._classes.model import Model def test_mismatched_shapes_raise_ShapeError(): X = numpy.ones((3, 4)) model = Model(10, 5) with pytest.raises(ValueError): y = model.begin_training(X) ## Instruction: Update test and import errors ## Code After: import pytest import numpy from ...neural._classes.model import Model from ...exceptions import UndefinedOperatorError, DifferentLengthError from ...exceptions import ExpectedTypeError, ShapeMismatchError def test_mismatched_shapes_raise_ShapeError(): X = numpy.ones((3, 4)) model = Model(10, 5) with pytest.raises(ShapeMismatchError): y = model.begin_training(X, X)
// ... existing code ... import numpy from ...neural._classes.model import Model from ...exceptions import UndefinedOperatorError, DifferentLengthError from ...exceptions import ExpectedTypeError, ShapeMismatchError def test_mismatched_shapes_raise_ShapeError(): X = numpy.ones((3, 4)) model = Model(10, 5) with pytest.raises(ShapeMismatchError): y = model.begin_training(X, X) // ... rest of the code ...
5972644fe7d0267849440d8e60509baba6e013a3
test/test_exception.py
test/test_exception.py
from mock import MagicMock import pyaem import unittest class TestPyAemException(unittest.TestCase): def test_init(self): exception = pyaem.PyAemException(123, 'somemessage') self.assertEqual(exception.code, 123) self.assertEqual(exception.message, 'somemessage') if __name__ == '__main__': unittest.main()
import pyaem import unittest class TestException(unittest.TestCase): def test_init(self): exception = pyaem.PyAemException(123, 'somemessage') self.assertEqual(exception.code, 123) self.assertEqual(exception.message, 'somemessage') if __name__ == '__main__': unittest.main()
Rename class name to be consistent with file name. Remove unused import.
Rename class name to be consistent with file name. Remove unused import.
Python
mit
Sensis/pyaem,wildone/pyaem
python
## Code Before: from mock import MagicMock import pyaem import unittest class TestPyAemException(unittest.TestCase): def test_init(self): exception = pyaem.PyAemException(123, 'somemessage') self.assertEqual(exception.code, 123) self.assertEqual(exception.message, 'somemessage') if __name__ == '__main__': unittest.main() ## Instruction: Rename class name to be consistent with file name. Remove unused import. ## Code After: import pyaem import unittest class TestException(unittest.TestCase): def test_init(self): exception = pyaem.PyAemException(123, 'somemessage') self.assertEqual(exception.code, 123) self.assertEqual(exception.message, 'somemessage') if __name__ == '__main__': unittest.main()
# ... existing code ... import pyaem import unittest class TestException(unittest.TestCase): def test_init(self): # ... rest of the code ...
e5d143cf5dfaab4a097a809e54b920e84c05b5eb
src/main/java/nuclibooktest/models/StaffTest.java
src/main/java/nuclibooktest/models/StaffTest.java
package nuclibooktest.models; import org.junit.After; import org.junit.Before; import static org.junit.Assert.*; public class StaffTest { @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } }
package nuclibooktest.models; import com.j256.ormlite.dao.Dao; import com.j256.ormlite.dao.DaoManager; import com.j256.ormlite.support.ConnectionSource; import nuclibook.models.CannotHashPasswordException; import nuclibook.models.Staff; import nuclibook.models.Tracer; import nuclibooktest.test_utils.TestSqlServerConnection; import org.junit.*; import java.sql.SQLException; import static org.junit.Assert.*; public class StaffTest { @Test public void setAndCheckPasswordTest() throws SQLException, CannotHashPasswordException { Staff staff = new Staff(); staff.setPassword("foobar"); assertTrue("Password check failed for set password 'foobar'", staff.checkPassword("foobar")); } @Test public void isInLastPasswordsTest() throws CannotHashPasswordException { Staff staff = new Staff(); staff.setPassword("foobar"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); staff.setPassword("foobar2"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); staff.setPassword("foobar3"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); staff.setPassword("foobar4"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar4")); staff.setPassword("foobar5"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar4")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar5")); assertFalse("isInLastPasswords() returned true when it should have returned false", staff.isInLastPasswords("foobar")); } }
Add password unit tests for staff model
Add password unit tests for staff model
Java
agpl-3.0
musalbas/Nuclibook,musalbas/Nuclibook,musalbas/Nuclibook
java
## Code Before: package nuclibooktest.models; import org.junit.After; import org.junit.Before; import static org.junit.Assert.*; public class StaffTest { @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } } ## Instruction: Add password unit tests for staff model ## Code After: package nuclibooktest.models; import com.j256.ormlite.dao.Dao; import com.j256.ormlite.dao.DaoManager; import com.j256.ormlite.support.ConnectionSource; import nuclibook.models.CannotHashPasswordException; import nuclibook.models.Staff; import nuclibook.models.Tracer; import nuclibooktest.test_utils.TestSqlServerConnection; import org.junit.*; import java.sql.SQLException; import static org.junit.Assert.*; public class StaffTest { @Test public void setAndCheckPasswordTest() throws SQLException, CannotHashPasswordException { Staff staff = new Staff(); staff.setPassword("foobar"); assertTrue("Password check failed for set password 'foobar'", staff.checkPassword("foobar")); } @Test public void isInLastPasswordsTest() throws CannotHashPasswordException { Staff staff = new Staff(); staff.setPassword("foobar"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); staff.setPassword("foobar2"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); staff.setPassword("foobar3"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); staff.setPassword("foobar4"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar4")); staff.setPassword("foobar5"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar4")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar5")); assertFalse("isInLastPasswords() returned true when it should have returned false", staff.isInLastPasswords("foobar")); } }
... package nuclibooktest.models; import com.j256.ormlite.dao.Dao; import com.j256.ormlite.dao.DaoManager; import com.j256.ormlite.support.ConnectionSource; import nuclibook.models.CannotHashPasswordException; import nuclibook.models.Staff; import nuclibook.models.Tracer; import nuclibooktest.test_utils.TestSqlServerConnection; import org.junit.*; import java.sql.SQLException; import static org.junit.Assert.*; public class StaffTest { @Test public void setAndCheckPasswordTest() throws SQLException, CannotHashPasswordException { Staff staff = new Staff(); staff.setPassword("foobar"); assertTrue("Password check failed for set password 'foobar'", staff.checkPassword("foobar")); } @Test public void isInLastPasswordsTest() throws CannotHashPasswordException { Staff staff = new Staff(); staff.setPassword("foobar"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); staff.setPassword("foobar2"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); staff.setPassword("foobar3"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); staff.setPassword("foobar4"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar4")); staff.setPassword("foobar5"); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar2")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar3")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar4")); assertTrue("isInLastPasswords() returned false when it should have returned true", staff.isInLastPasswords("foobar5")); assertFalse("isInLastPasswords() returned true when it should have returned false", staff.isInLastPasswords("foobar")); } } ...
d49b23365a972931502329f47a3aa65b9170477e
openstack/common/middleware/catch_errors.py
openstack/common/middleware/catch_errors.py
import webob.dec import webob.exc from openstack.common.gettextutils import _ # noqa from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
import webob.dec import webob.exc from openstack.common.gettextutils import _LE from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_LE('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
Update oslo log messages with translation domains
Update oslo log messages with translation domains Update the incubator code to use different domains for log messages at different levels. Update the import exceptions setting for hacking to allow multiple functions to be imported from gettextutils on one line. bp log-messages-translation-domain Change-Id: I6ce0f4a59438612ce74c46b3ee9398bef24c0c19
Python
apache-2.0
varunarya10/oslo.middleware,openstack/oslo.middleware,chungg/oslo.middleware,JioCloud/oslo.middleware
python
## Code Before: import webob.dec import webob.exc from openstack.common.gettextutils import _ # noqa from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response ## Instruction: Update oslo log messages with translation domains Update the incubator code to use different domains for log messages at different levels. Update the import exceptions setting for hacking to allow multiple functions to be imported from gettextutils on one line. bp log-messages-translation-domain Change-Id: I6ce0f4a59438612ce74c46b3ee9398bef24c0c19 ## Code After: import webob.dec import webob.exc from openstack.common.gettextutils import _LE from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_LE('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
... import webob.dec import webob.exc from openstack.common.gettextutils import _LE from openstack.common import log as logging from openstack.common.middleware import base ... try: response = req.get_response(self.application) except Exception: LOG.exception(_LE('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response ...
766603021ff406c950e798ce3fb259c9f1f460c7
src/test/java/com/wombatnation/privateer/PrimitivesTest.java
src/test/java/com/wombatnation/privateer/PrimitivesTest.java
package com.wombatnation.privateer; import static org.junit.Assert.assertEquals; import org.junit.Ignore; import org.junit.Test; public class PrimitivesTest { private int intMethod(int a) { return a; } // @Test @Ignore public void testIntArg() throws Exception { Privateer p = new Privateer(); int a = 1; Object result = p.callMethod(this, "intMethod", a); Integer i = (Integer) result; assertEquals(i.intValue(), a); } }
package com.wombatnation.privateer; import static org.junit.Assert.assertEquals; import org.junit.Ignore; import org.junit.Test; public class PrimitivesTest { private int intMethod(int a) { return a; } @Test // Maven demands at least one runnable test public void makeMavenHappy() { } // @Test @Ignore public void testIntArg() throws Exception { Privateer p = new Privateer(); int a = 1; Object result = p.callMethod(this, "intMethod", a); Integer i = (Integer) result; assertEquals(i.intValue(), a); } }
Make annoying Maven happy by adding runnable test to in progress unit test file
Make annoying Maven happy by adding runnable test to in progress unit test file
Java
apache-2.0
RobertStewart/privateer
java
## Code Before: package com.wombatnation.privateer; import static org.junit.Assert.assertEquals; import org.junit.Ignore; import org.junit.Test; public class PrimitivesTest { private int intMethod(int a) { return a; } // @Test @Ignore public void testIntArg() throws Exception { Privateer p = new Privateer(); int a = 1; Object result = p.callMethod(this, "intMethod", a); Integer i = (Integer) result; assertEquals(i.intValue(), a); } } ## Instruction: Make annoying Maven happy by adding runnable test to in progress unit test file ## Code After: package com.wombatnation.privateer; import static org.junit.Assert.assertEquals; import org.junit.Ignore; import org.junit.Test; public class PrimitivesTest { private int intMethod(int a) { return a; } @Test // Maven demands at least one runnable test public void makeMavenHappy() { } // @Test @Ignore public void testIntArg() throws Exception { Privateer p = new Privateer(); int a = 1; Object result = p.callMethod(this, "intMethod", a); Integer i = (Integer) result; assertEquals(i.intValue(), a); } }
... return a; } @Test // Maven demands at least one runnable test public void makeMavenHappy() { } // @Test @Ignore public void testIntArg() throws Exception { ...
cd2ff46284a8144755b880c035d0a89938474955
salt/grains/extra.py
salt/grains/extra.py
from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils.files log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell return {'shell': os.environ.get('SHELL', '/bin/sh')} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): with salt.utils.files.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils.files import salt.utils.platform log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell if salt.utils.platform.is_windows(): env_var = 'COMSPEC' default = r'C:\Windows\system32\cmd.exe' else: env_var = 'SHELL' default = '/bin/sh' return {'shell': os.environ.get(env_var, default)} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): with salt.utils.files.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
Return COMSPEC as the shell for Windows
Return COMSPEC as the shell for Windows
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
python
## Code Before: from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils.files log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell return {'shell': os.environ.get('SHELL', '/bin/sh')} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): with salt.utils.files.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {} ## Instruction: Return COMSPEC as the shell for Windows ## Code After: from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils.files import salt.utils.platform log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell if salt.utils.platform.is_windows(): env_var = 'COMSPEC' default = r'C:\Windows\system32\cmd.exe' else: env_var = 'SHELL' default = '/bin/sh' return {'shell': os.environ.get(env_var, default)} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): with salt.utils.files.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
// ... existing code ... # Import salt libs import salt.utils.files import salt.utils.platform log = logging.getLogger(__name__) // ... modified code ... ''' # Provides: # shell if salt.utils.platform.is_windows(): env_var = 'COMSPEC' default = r'C:\Windows\system32\cmd.exe' else: env_var = 'SHELL' default = '/bin/sh' return {'shell': os.environ.get(env_var, default)} def config(): // ... rest of the code ...
4070507e3357d36f2412cc5c68a63780ae1b814d
glance_api_local_check.py
glance_api_local_check.py
from maas_common import (get_auth_ref, get_glance_client, status_err, status_ok, metric) import sys IMAGE_ENDPOINT = 'http://127.0.0.1:9292' def check(token): glance = get_glance_client(token, IMAGE_ENDPOINT) if glance is None: status_err('Unable to obtain valid glance client, cannot proceed') status_ok() metric('glance_api_local_status', 'uint32', 1) def main(): auth_ref = get_auth_ref() token = auth_ref['token']['id'] check(token) if __name__ == "__main__": main()
from maas_common import (status_ok, status_err, metric, get_keystone_client, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): keystone = get_keystone_client(auth_ref) tenant_id = keystone.tenant_id auth_token = keystone.auth_token registry_endpoint = 'http://127.0.0.1:9292/v2' api-status = 1 milliseconds = 0 s = Session() s.headers.update( {'Content-type': 'application/json', 'x-auth-token': auth_token}) try: # Hit something that isn't querying the glance-registry, since we # query glance-registry in separate checks r = s.get('%s/schemas/image' % registry_endpoint, verify=False, timeout=10) except (exc.ConnectionError, exc.HTTPError, exc.Timeout): api_status = 0 milliseconds = -1 except Exception as e: status_err(str(e)) else: milliseconds = r.elapsed.total_seconds() * 1000 if not r.ok: api_status = 0 status_ok() metric('glance_registry_local_status', 'uint32', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds) def main(): auth_ref = get_auth_ref() check(auth_ref) if __name__ == "__main__": main()
Make a direct call to glance-api using requests
Make a direct call to glance-api using requests This change makes this check no longer use the glanceclient tool so we can craft a request that doesn't hit the glance-registry. The reason for this is that the glance-registry itself is tested in a different check and therefore we just need to ensure the glance-api itself is responding.
Python
apache-2.0
cfarquhar/rpc-openstack,stevelle/rpc-openstack,robb-romans/rpc-openstack,byronmccollum/rpc-openstack,cloudnull/rpc-maas,byronmccollum/rpc-openstack,mattt416/rpc-openstack,nrb/rpc-openstack,hughsaunders/rpc-openstack,darrenchan/rpc-openstack,byronmccollum/rpc-openstack,jacobwagner/rpc-openstack,jpmontez/rpc-openstack,briancurtin/rpc-maas,sigmavirus24/rpc-openstack,darrenchan/rpc-openstack,rcbops/rpc-openstack,BjoernT/rpc-openstack,prometheanfire/rpc-openstack,nrb/rpc-openstack,briancurtin/rpc-maas,cloudnull/rpc-maas,xeregin/rpc-openstack,busterswt/rpc-openstack,git-harry/rpc-openstack,major/rpc-openstack,stevelle/rpc-openstack,miguelgrinberg/rpc-openstack,darrenchan/rpc-openstack,mattt416/rpc-openstack,cloudnull/rpc-openstack,robb-romans/rpc-openstack,cfarquhar/rpc-openstack,andymcc/rpc-openstack,jpmontez/rpc-openstack,darrenchan/rpc-openstack,xeregin/rpc-openstack,sigmavirus24/rpc-openstack,claco/rpc-openstack,galstrom21/rpc-openstack,cfarquhar/rpc-maas,mancdaz/rpc-openstack,jpmontez/rpc-openstack,git-harry/rpc-openstack,npawelek/rpc-maas,shannonmitchell/rpc-openstack,briancurtin/rpc-maas,claco/rpc-openstack,prometheanfire/rpc-openstack,BjoernT/rpc-openstack,busterswt/rpc-openstack,cloudnull/rpc-maas,jacobwagner/rpc-openstack,stevelle/rpc-openstack,sigmavirus24/rpc-openstack,mattt416/rpc-openstack,claco/rpc-openstack,cfarquhar/rpc-maas,npawelek/rpc-maas,andymcc/rpc-openstack,nrb/rpc-openstack,rcbops/rpc-openstack,miguelgrinberg/rpc-openstack,xeregin/rpc-openstack,galstrom21/rpc-openstack,cloudnull/rpc-openstack,miguelgrinberg/rpc-openstack,sigmavirus24/rpc-openstack,xeregin/rpc-openstack,shannonmitchell/rpc-openstack,major/rpc-openstack,npawelek/rpc-maas,busterswt/rpc-openstack,andymcc/rpc-openstack,mancdaz/rpc-openstack,hughsaunders/rpc-openstack,cfarquhar/rpc-maas
python
## Code Before: from maas_common import (get_auth_ref, get_glance_client, status_err, status_ok, metric) import sys IMAGE_ENDPOINT = 'http://127.0.0.1:9292' def check(token): glance = get_glance_client(token, IMAGE_ENDPOINT) if glance is None: status_err('Unable to obtain valid glance client, cannot proceed') status_ok() metric('glance_api_local_status', 'uint32', 1) def main(): auth_ref = get_auth_ref() token = auth_ref['token']['id'] check(token) if __name__ == "__main__": main() ## Instruction: Make a direct call to glance-api using requests This change makes this check no longer use the glanceclient tool so we can craft a request that doesn't hit the glance-registry. The reason for this is that the glance-registry itself is tested in a different check and therefore we just need to ensure the glance-api itself is responding. ## Code After: from maas_common import (status_ok, status_err, metric, get_keystone_client, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): keystone = get_keystone_client(auth_ref) tenant_id = keystone.tenant_id auth_token = keystone.auth_token registry_endpoint = 'http://127.0.0.1:9292/v2' api-status = 1 milliseconds = 0 s = Session() s.headers.update( {'Content-type': 'application/json', 'x-auth-token': auth_token}) try: # Hit something that isn't querying the glance-registry, since we # query glance-registry in separate checks r = s.get('%s/schemas/image' % registry_endpoint, verify=False, timeout=10) except (exc.ConnectionError, exc.HTTPError, exc.Timeout): api_status = 0 milliseconds = -1 except Exception as e: status_err(str(e)) else: milliseconds = r.elapsed.total_seconds() * 1000 if not r.ok: api_status = 0 status_ok() metric('glance_registry_local_status', 'uint32', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds) def main(): auth_ref = get_auth_ref() check(auth_ref) if __name__ == "__main__": main()
// ... existing code ... from maas_common import (status_ok, status_err, metric, get_keystone_client, get_auth_ref) from requests import Session from requests import exceptions as exc def check(auth_ref): keystone = get_keystone_client(auth_ref) tenant_id = keystone.tenant_id auth_token = keystone.auth_token registry_endpoint = 'http://127.0.0.1:9292/v2' api-status = 1 milliseconds = 0 s = Session() s.headers.update( {'Content-type': 'application/json', 'x-auth-token': auth_token}) try: # Hit something that isn't querying the glance-registry, since we # query glance-registry in separate checks r = s.get('%s/schemas/image' % registry_endpoint, verify=False, timeout=10) except (exc.ConnectionError, exc.HTTPError, exc.Timeout): api_status = 0 milliseconds = -1 except Exception as e: status_err(str(e)) else: milliseconds = r.elapsed.total_seconds() * 1000 if not r.ok: api_status = 0 status_ok() metric('glance_registry_local_status', 'uint32', api_status) metric('glance_registry_local_response_time', 'int32', milliseconds) def main(): auth_ref = get_auth_ref() check(auth_ref) if __name__ == "__main__": // ... rest of the code ...
0b1813bef37819209ed9fb5b06eb7495d0e0e1fb
netmiko/arista/arista_ssh.py
netmiko/arista/arista_ssh.py
from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): pass
import time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer()
Improve Arista reliability on slow login
Improve Arista reliability on slow login
Python
mit
fooelisa/netmiko,ktbyers/netmiko,shamanu4/netmiko,ktbyers/netmiko,shamanu4/netmiko,shsingh/netmiko,shsingh/netmiko,isidroamv/netmiko,fooelisa/netmiko,isidroamv/netmiko
python
## Code Before: from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): pass ## Instruction: Improve Arista reliability on slow login ## Code After: import time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer()
... import time from netmiko.ssh_connection import SSHConnection class AristaSSH(SSHConnection): def special_login_handler(self, delay_factor=1): """ Arista adds a "Last login: " message that doesn't always have sufficient time to be handled """ time.sleep(3 * delay_factor) self.clear_buffer() ...
edab226942fbab75aa66e16d5814b1c38c0e8507
2048/policy.py
2048/policy.py
import tensorflow as tf class EpsilonGreedyPolicy: def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay): self.env = env self.dqn = dqn self.epsilon_max = epsilon_max self.epsilon_min = epsilon_min self.epsilon_decay = epsilon_decay def take_action(self, state, step): explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp( -self.epsilon_decay * tf.cast(step, tf.float32)) if explore_probability > tf.random.uniform(shape=()): return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability state = tf.reshape(state, (1, *state.shape, -1)) q_preds = self.dqn(state)[0] return tf.argmax(q_preds, output_type=tf.int32), explore_probability
import tensorflow as tf class EpsilonGreedyPolicy: def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay): self.env = env self.dqn = dqn self.epsilon_max = epsilon_max self.epsilon_min = epsilon_min self.epsilon_decay = epsilon_decay def take_action(self, state, step): explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp( -self.epsilon_decay * tf.cast(step, tf.float32)) if explore_probability > tf.random.uniform(shape=()): return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability state = tf.expand_dims(state, axis=0) q_preds = self.dqn(state)[0] return tf.argmax(q_preds, output_type=tf.int32), explore_probability
Fix error in state shape in EGP
[2048] Fix error in state shape in EGP
Python
mit
akshaykurmi/reinforcement-learning
python
## Code Before: import tensorflow as tf class EpsilonGreedyPolicy: def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay): self.env = env self.dqn = dqn self.epsilon_max = epsilon_max self.epsilon_min = epsilon_min self.epsilon_decay = epsilon_decay def take_action(self, state, step): explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp( -self.epsilon_decay * tf.cast(step, tf.float32)) if explore_probability > tf.random.uniform(shape=()): return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability state = tf.reshape(state, (1, *state.shape, -1)) q_preds = self.dqn(state)[0] return tf.argmax(q_preds, output_type=tf.int32), explore_probability ## Instruction: [2048] Fix error in state shape in EGP ## Code After: import tensorflow as tf class EpsilonGreedyPolicy: def __init__(self, env, dqn, epsilon_max, epsilon_min, epsilon_decay): self.env = env self.dqn = dqn self.epsilon_max = epsilon_max self.epsilon_min = epsilon_min self.epsilon_decay = epsilon_decay def take_action(self, state, step): explore_probability = self.epsilon_min + (self.epsilon_max - self.epsilon_min) * tf.math.exp( -self.epsilon_decay * tf.cast(step, tf.float32)) if explore_probability > tf.random.uniform(shape=()): return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability state = tf.expand_dims(state, axis=0) q_preds = self.dqn(state)[0] return tf.argmax(q_preds, output_type=tf.int32), explore_probability
... -self.epsilon_decay * tf.cast(step, tf.float32)) if explore_probability > tf.random.uniform(shape=()): return tf.constant(self.env.action_space.sample(), dtype=tf.int32), explore_probability state = tf.expand_dims(state, axis=0) q_preds = self.dqn(state)[0] return tf.argmax(q_preds, output_type=tf.int32), explore_probability ...
7bb851e54b9cc245259809c828ddbef62239c210
sensor_mqtt.py
sensor_mqtt.py
import mosquitto import os import time import json import random import yaml # Load config stream = open("config.yml", 'r') config = yaml.load(stream) endpoint = os.environ['MQTT_ENDPOINT'] mypid = os.getpid() client_uniq = "sensor_mqtt_"+str(mypid) mqttc = mosquitto.Mosquitto(client_uniq) mqttc.username_pw_set(config['mqtt']['username']) mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True) def publish(sensor, reading_type, reading): sensor_config = config['sensors'][sensor][reading_type] if sensor_config: data = { 'version':'1.0.0', 'datastreams': [ { "id" : sensor_config['publish_id'], "datapoints": [ { "at": time.ctime(), "value": reading } ] } ] } mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data)) while mqttc.loop() == 0: publish("R1", "RIVR", random.randrange(0,255)) print "message published" time.sleep(1) pass def cleanup(): print "Ending and cleaning up" mqttc.disconnect()
import mosquitto import os import time import json import random import yaml # Load config stream = open("config.yml", 'r') config = yaml.load(stream) endpoint = os.environ['MQTT_ENDPOINT'] mypid = os.getpid() client_uniq = "sensor_mqtt_"+str(mypid) mqttc = mosquitto.Mosquitto(client_uniq) mqttc.username_pw_set(config['mqtt']['username']) mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True) def publish(sensor, reading_type, reading): try: sensor_config = config['sensors'][sensor][reading_type] except KeyError: print "unknown sensor or reading type: " + sensor + " " + reading_type else: if sensor_config: data = { 'version':'1.0.0', 'datastreams': [ { "id" : sensor_config['publish_id'], "datapoints": [ { "at": time.ctime(), "value": reading } ] } ] } mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data)) print "message published: " + sensor + " " + reading_type while mqttc.loop() == 0: publish("R1", "RIVR", random.randrange(0,255)) time.sleep(1) pass def cleanup(): print "Ending and cleaning up" mqttc.disconnect()
Handle sensors / types that aren't in config file
Handle sensors / types that aren't in config file
Python
mit
sushack/pi_sensor_mqtt,OxFloodNet/pi_sensor_mqtt
python
## Code Before: import mosquitto import os import time import json import random import yaml # Load config stream = open("config.yml", 'r') config = yaml.load(stream) endpoint = os.environ['MQTT_ENDPOINT'] mypid = os.getpid() client_uniq = "sensor_mqtt_"+str(mypid) mqttc = mosquitto.Mosquitto(client_uniq) mqttc.username_pw_set(config['mqtt']['username']) mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True) def publish(sensor, reading_type, reading): sensor_config = config['sensors'][sensor][reading_type] if sensor_config: data = { 'version':'1.0.0', 'datastreams': [ { "id" : sensor_config['publish_id'], "datapoints": [ { "at": time.ctime(), "value": reading } ] } ] } mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data)) while mqttc.loop() == 0: publish("R1", "RIVR", random.randrange(0,255)) print "message published" time.sleep(1) pass def cleanup(): print "Ending and cleaning up" mqttc.disconnect() ## Instruction: Handle sensors / types that aren't in config file ## Code After: import mosquitto import os import time import json import random import yaml # Load config stream = open("config.yml", 'r') config = yaml.load(stream) endpoint = os.environ['MQTT_ENDPOINT'] mypid = os.getpid() client_uniq = "sensor_mqtt_"+str(mypid) mqttc = mosquitto.Mosquitto(client_uniq) mqttc.username_pw_set(config['mqtt']['username']) mqttc.connect(config['mqtt']['broker'], config['mqtt']['port'], 60, True) def publish(sensor, reading_type, reading): try: sensor_config = config['sensors'][sensor][reading_type] except KeyError: print "unknown sensor or reading type: " + sensor + " " + reading_type else: if sensor_config: data = { 'version':'1.0.0', 'datastreams': [ { "id" : sensor_config['publish_id'], "datapoints": [ { "at": time.ctime(), "value": reading } ] } ] } mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data)) print "message published: " + sensor + " " + reading_type while mqttc.loop() == 0: publish("R1", "RIVR", random.randrange(0,255)) time.sleep(1) pass def cleanup(): print "Ending and cleaning up" mqttc.disconnect()
... def publish(sensor, reading_type, reading): try: sensor_config = config['sensors'][sensor][reading_type] except KeyError: print "unknown sensor or reading type: " + sensor + " " + reading_type else: if sensor_config: data = { 'version':'1.0.0', 'datastreams': [ { "id" : sensor_config['publish_id'], "datapoints": [ { "at": time.ctime(), "value": reading } ] } ] } mqttc.publish(sensor_config['mqtt_endpoint'], json.dumps(data)) print "message published: " + sensor + " " + reading_type while mqttc.loop() == 0: publish("R1", "RIVR", random.randrange(0,255)) time.sleep(1) pass ...
2bfcd96325b8f0a677658a13440c7ac0066915e2
include/stm8_gpio.h
include/stm8_gpio.h
typedef enum { PORT_A = PA_ODR, PORT_B = PB_ODR, PORT_C = PB_, PORT_D, PORT_E, PORT_F } port_t; void toggle_port_a_pin(uint8_t pin); void set_high_port_a_pin(uint8_t pin); void set_low_port_a_pin(uint8_t pin); void set
struct input_pin_config { bool pull_up_enable; bool interrupt_enable; }; struct output_pin_config { bool open_drain_enable; bool fast_mode_enable; }; inline void set_port_a(uint8_t value) { PA_ODR = value; } inline void toggle_port_a_pin(uint8_t pin) { set_port_a((*(uart16_t *) PA_ODR) ^ ~(1 << pin)); } inline void set_high_port_a_pin(uint8_t pin) { set_port_a((*(uint16_t *) PA_ODR) | (1 << pin)); } inline void set_low_port_a_pin(uint8_t pin) { set_port_a((*(uart16_t *) PA_ODR) & ~(1 << pin)); } inline void read_port_a(uint8_t * value) { &value = (uint16_t *) PA_IDR; } inline bool read_port_a_pin(uint8_t pin) { uint8_t value; read_port_a_pin(value); return value >> pin; } inline void configure_port_a_input_pin(struct input_pin_config * config); inline void configure_port_a_output_pin(struct output_pin_config * config);
Add some inline functions for configuring gpio.
Add some inline functions for configuring gpio.
C
mit
tderensis/stm8_lib,tderensis/stm8_lib
c
## Code Before: typedef enum { PORT_A = PA_ODR, PORT_B = PB_ODR, PORT_C = PB_, PORT_D, PORT_E, PORT_F } port_t; void toggle_port_a_pin(uint8_t pin); void set_high_port_a_pin(uint8_t pin); void set_low_port_a_pin(uint8_t pin); void set ## Instruction: Add some inline functions for configuring gpio. ## Code After: struct input_pin_config { bool pull_up_enable; bool interrupt_enable; }; struct output_pin_config { bool open_drain_enable; bool fast_mode_enable; }; inline void set_port_a(uint8_t value) { PA_ODR = value; } inline void toggle_port_a_pin(uint8_t pin) { set_port_a((*(uart16_t *) PA_ODR) ^ ~(1 << pin)); } inline void set_high_port_a_pin(uint8_t pin) { set_port_a((*(uint16_t *) PA_ODR) | (1 << pin)); } inline void set_low_port_a_pin(uint8_t pin) { set_port_a((*(uart16_t *) PA_ODR) & ~(1 << pin)); } inline void read_port_a(uint8_t * value) { &value = (uint16_t *) PA_IDR; } inline bool read_port_a_pin(uint8_t pin) { uint8_t value; read_port_a_pin(value); return value >> pin; } inline void configure_port_a_input_pin(struct input_pin_config * config); inline void configure_port_a_output_pin(struct output_pin_config * config);
# ... existing code ... struct input_pin_config { bool pull_up_enable; bool interrupt_enable; }; struct output_pin_config { bool open_drain_enable; bool fast_mode_enable; }; inline void set_port_a(uint8_t value) { PA_ODR = value; } inline void toggle_port_a_pin(uint8_t pin) { set_port_a((*(uart16_t *) PA_ODR) ^ ~(1 << pin)); } inline void set_high_port_a_pin(uint8_t pin) { set_port_a((*(uint16_t *) PA_ODR) | (1 << pin)); } inline void set_low_port_a_pin(uint8_t pin) { set_port_a((*(uart16_t *) PA_ODR) & ~(1 << pin)); } inline void read_port_a(uint8_t * value) { &value = (uint16_t *) PA_IDR; } inline bool read_port_a_pin(uint8_t pin) { uint8_t value; read_port_a_pin(value); return value >> pin; } inline void configure_port_a_input_pin(struct input_pin_config * config); inline void configure_port_a_output_pin(struct output_pin_config * config); # ... rest of the code ...
46a376698851957813287fcb8deb1e7ebc222914
alfred_listener/__main__.py
alfred_listener/__main__.py
import os from argh import arg, ArghParser from functools import wraps def with_app(func): @wraps(func) @arg('--config', help='Path to config file', required=True) def wrapper(*args, **kwargs): config = args[0].config from alfred_listener import create_app app = create_app(config) return func(app, *args, **kwargs) return wrapper @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @with_app def runserver(app, args): app.run(args.host, args.port) @with_app def shell(app, args): from alfred_listener.helpers import get_shell with app.test_request_context(): sh = get_shell() sh(app=app) def main(): parser = ArghParser() parser.add_commands([runserver, shell]) parser.dispatch() if __name__ == '__main__': main()
import os from argh import arg, ArghParser from functools import wraps def with_app(func): @wraps(func) @arg('--config', help='Path to config file', required=True) def wrapper(*args, **kwargs): config = args[0].config from alfred_listener import create_app app = create_app(config) return func(app, *args, **kwargs) return wrapper @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @arg('--noreload', action='store_true', help='disable code reloader') @with_app def runserver(app, args): app.run(args.host, args.port, use_reloader=not args.noreload) @with_app def shell(app, args): from alfred_listener.helpers import get_shell with app.test_request_context(): sh = get_shell() sh(app=app) def main(): parser = ArghParser() parser.add_commands([runserver, shell]) parser.dispatch() if __name__ == '__main__': main()
Add an option to disable code reloader to runserver command
Add an option to disable code reloader to runserver command
Python
isc
alfredhq/alfred-listener
python
## Code Before: import os from argh import arg, ArghParser from functools import wraps def with_app(func): @wraps(func) @arg('--config', help='Path to config file', required=True) def wrapper(*args, **kwargs): config = args[0].config from alfred_listener import create_app app = create_app(config) return func(app, *args, **kwargs) return wrapper @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @with_app def runserver(app, args): app.run(args.host, args.port) @with_app def shell(app, args): from alfred_listener.helpers import get_shell with app.test_request_context(): sh = get_shell() sh(app=app) def main(): parser = ArghParser() parser.add_commands([runserver, shell]) parser.dispatch() if __name__ == '__main__': main() ## Instruction: Add an option to disable code reloader to runserver command ## Code After: import os from argh import arg, ArghParser from functools import wraps def with_app(func): @wraps(func) @arg('--config', help='Path to config file', required=True) def wrapper(*args, **kwargs): config = args[0].config from alfred_listener import create_app app = create_app(config) return func(app, *args, **kwargs) return wrapper @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @arg('--noreload', action='store_true', help='disable code reloader') @with_app def runserver(app, args): app.run(args.host, args.port, use_reloader=not args.noreload) @with_app def shell(app, args): from alfred_listener.helpers import get_shell with app.test_request_context(): sh = get_shell() sh(app=app) def main(): parser = ArghParser() parser.add_commands([runserver, shell]) parser.dispatch() if __name__ == '__main__': main()
... @arg('--host', default='127.0.0.1', help='the host') @arg('--port', default=5000, help='the port') @arg('--noreload', action='store_true', help='disable code reloader') @with_app def runserver(app, args): app.run(args.host, args.port, use_reloader=not args.noreload) @with_app ...
93effb501a50d8265afd37826fbcab4dd4a87611
qa_app/views.py
qa_app/views.py
from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query(email=session['email']).first() attempts = Attempts.query(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query.filter_by(email=session['email']).first() attempts = Attempts.query.filter_by(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
Fix user and attempts sqlalchemia request.
Fix user and attempts sqlalchemia request.
Python
apache-2.0
molecul/qa_app_flask,molecul/qa_app_flask,molecul/qa_app_flask
python
## Code Before: from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query(email=session['email']).first() attempts = Attempts.query(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts) ## Instruction: Fix user and attempts sqlalchemia request. ## Code After: from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query.filter_by(email=session['email']).first() attempts = Attempts.query.filter_by(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
... @views.route('/profile') @login_required def profile(): user = Users.query.filter_by(email=session['email']).first() attempts = Attempts.query.filter_by(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts) ...
1224552892d1d459864d5ab2dada328a20cc66e7
jobs/spiders/tvinna.py
jobs/spiders/tvinna.py
import dateutil.parser import scrapy.spiders from jobs.items import JobsItem class TvinnaSpider(scrapy.spiders.XMLFeedSpider): name = "tvinna" start_urls = ['http://www.tvinna.is/feed/?post_type=job_listing'] itertag = 'item' namespaces = [ ('atom', 'http://www.w3.org/2005/Atom'), ('content', 'http://purl.org/rss/1.0/modules/content/'), ('dc', 'http://purl.org/dc/elements/1.1/'), ('slash', 'http://purl.org/rss/1.0/modules/slash/'), ('sy', 'http://purl.org/rss/1.0/modules/syndication/'), ('wfw', 'http://wellformedweb.org/CommentAPI/'), ] def parse_node(self, response, node): item = JobsItem() item['spider'] = self.name item['title'] = node.xpath('title/text()').extract_first() item['company'] = node.xpath('dc:creator/text()').extract_first() item['url'] = node.xpath('link/text()').extract_first() time_posted = node.xpath('pubDate/text()').extract_first() item['posted'] = dateutil.parser.parse(time_posted).isoformat() return item
import dateutil.parser import scrapy import scrapy.spiders from jobs.items import JobsItem class TvinnaSpider(scrapy.spiders.XMLFeedSpider): name = "tvinna" start_urls = ['http://www.tvinna.is/feed/?post_type=job_listing'] itertag = 'item' namespaces = [ ('atom', 'http://www.w3.org/2005/Atom'), ('content', 'http://purl.org/rss/1.0/modules/content/'), ('dc', 'http://purl.org/dc/elements/1.1/'), ('slash', 'http://purl.org/rss/1.0/modules/slash/'), ('sy', 'http://purl.org/rss/1.0/modules/syndication/'), ('wfw', 'http://wellformedweb.org/CommentAPI/'), ] def parse_node(self, response, node): item = JobsItem() item['spider'] = self.name item['title'] = node.xpath('title/text()').extract_first() item['url'] = url = node.xpath('link/text()').extract_first() time_posted = node.xpath('pubDate/text()').extract_first() item['posted'] = dateutil.parser.parse(time_posted).isoformat() request = scrapy.Request(url, callback=self.parse_specific_job) request.meta['item'] = item yield request def parse_specific_job(self, response): item = response.meta['item'] item['company'] = response.css('.company a::text').extract_first() yield item
Fix the extraction of the company name.
Fix the extraction of the company name. There's an apparent bug in the Tvinna rss feed, such that the username of the person creating the listing is used in place of a company name in the `<cd:creator>` field. As a work around, we need to visit the job listing page, and extract it from that instead. It requires more requests, but yields more accurate results.
Python
apache-2.0
multiplechoice/workplace
python
## Code Before: import dateutil.parser import scrapy.spiders from jobs.items import JobsItem class TvinnaSpider(scrapy.spiders.XMLFeedSpider): name = "tvinna" start_urls = ['http://www.tvinna.is/feed/?post_type=job_listing'] itertag = 'item' namespaces = [ ('atom', 'http://www.w3.org/2005/Atom'), ('content', 'http://purl.org/rss/1.0/modules/content/'), ('dc', 'http://purl.org/dc/elements/1.1/'), ('slash', 'http://purl.org/rss/1.0/modules/slash/'), ('sy', 'http://purl.org/rss/1.0/modules/syndication/'), ('wfw', 'http://wellformedweb.org/CommentAPI/'), ] def parse_node(self, response, node): item = JobsItem() item['spider'] = self.name item['title'] = node.xpath('title/text()').extract_first() item['company'] = node.xpath('dc:creator/text()').extract_first() item['url'] = node.xpath('link/text()').extract_first() time_posted = node.xpath('pubDate/text()').extract_first() item['posted'] = dateutil.parser.parse(time_posted).isoformat() return item ## Instruction: Fix the extraction of the company name. There's an apparent bug in the Tvinna rss feed, such that the username of the person creating the listing is used in place of a company name in the `<cd:creator>` field. As a work around, we need to visit the job listing page, and extract it from that instead. It requires more requests, but yields more accurate results. ## Code After: import dateutil.parser import scrapy import scrapy.spiders from jobs.items import JobsItem class TvinnaSpider(scrapy.spiders.XMLFeedSpider): name = "tvinna" start_urls = ['http://www.tvinna.is/feed/?post_type=job_listing'] itertag = 'item' namespaces = [ ('atom', 'http://www.w3.org/2005/Atom'), ('content', 'http://purl.org/rss/1.0/modules/content/'), ('dc', 'http://purl.org/dc/elements/1.1/'), ('slash', 'http://purl.org/rss/1.0/modules/slash/'), ('sy', 'http://purl.org/rss/1.0/modules/syndication/'), ('wfw', 'http://wellformedweb.org/CommentAPI/'), ] def parse_node(self, response, node): item = JobsItem() item['spider'] = self.name item['title'] = node.xpath('title/text()').extract_first() item['url'] = url = node.xpath('link/text()').extract_first() time_posted = node.xpath('pubDate/text()').extract_first() item['posted'] = dateutil.parser.parse(time_posted).isoformat() request = scrapy.Request(url, callback=self.parse_specific_job) request.meta['item'] = item yield request def parse_specific_job(self, response): item = response.meta['item'] item['company'] = response.css('.company a::text').extract_first() yield item
... import dateutil.parser import scrapy import scrapy.spiders from jobs.items import JobsItem ... item = JobsItem() item['spider'] = self.name item['title'] = node.xpath('title/text()').extract_first() item['url'] = url = node.xpath('link/text()').extract_first() time_posted = node.xpath('pubDate/text()').extract_first() item['posted'] = dateutil.parser.parse(time_posted).isoformat() request = scrapy.Request(url, callback=self.parse_specific_job) request.meta['item'] = item yield request def parse_specific_job(self, response): item = response.meta['item'] item['company'] = response.css('.company a::text').extract_first() yield item ...
b844b5ea9f7df47a9c000699b6b2636fa16a20cd
lfc/context_processors.py
lfc/context_processors.py
import lfc.utils from django.conf import settings from django.utils import translation def main(request): """context processor for LFC. """ current_language = translation.get_language() default_language = settings.LANGUAGE_CODE return { "PORTAL" : lfc.utils.get_portal(), "LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE, "DEFAULT_LANGUAGE" : default_language, "CURRENT_LANGUAGE" : current_language, "IS_DEFAULT_LANGUAGE" : default_language == current_language, }
import lfc.utils from django.conf import settings from django.utils import translation def main(request): """context processor for LFC. """ current_language = translation.get_language() default_language = settings.LANGUAGE_CODE is_default_language = default_language == current_language if current_language == "0" or is_default_language: link_language = "" else: link_language = current_language return { "PORTAL" : lfc.utils.get_portal(), "LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE, "DEFAULT_LANGUAGE" : default_language, "CURRENT_LANGUAGE" : current_language, "IS_DEFAULT_LANGUAGE" : is_default_language, "LINK_LANGUAGE" : link_language, }
Return correct language for using within links
Improvement: Return correct language for using within links
Python
bsd-3-clause
diefenbach/django-lfc,diefenbach/django-lfc,diefenbach/django-lfc
python
## Code Before: import lfc.utils from django.conf import settings from django.utils import translation def main(request): """context processor for LFC. """ current_language = translation.get_language() default_language = settings.LANGUAGE_CODE return { "PORTAL" : lfc.utils.get_portal(), "LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE, "DEFAULT_LANGUAGE" : default_language, "CURRENT_LANGUAGE" : current_language, "IS_DEFAULT_LANGUAGE" : default_language == current_language, } ## Instruction: Improvement: Return correct language for using within links ## Code After: import lfc.utils from django.conf import settings from django.utils import translation def main(request): """context processor for LFC. """ current_language = translation.get_language() default_language = settings.LANGUAGE_CODE is_default_language = default_language == current_language if current_language == "0" or is_default_language: link_language = "" else: link_language = current_language return { "PORTAL" : lfc.utils.get_portal(), "LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE, "DEFAULT_LANGUAGE" : default_language, "CURRENT_LANGUAGE" : current_language, "IS_DEFAULT_LANGUAGE" : is_default_language, "LINK_LANGUAGE" : link_language, }
... """ current_language = translation.get_language() default_language = settings.LANGUAGE_CODE is_default_language = default_language == current_language if current_language == "0" or is_default_language: link_language = "" else: link_language = current_language return { "PORTAL" : lfc.utils.get_portal(), "LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE, "DEFAULT_LANGUAGE" : default_language, "CURRENT_LANGUAGE" : current_language, "IS_DEFAULT_LANGUAGE" : is_default_language, "LINK_LANGUAGE" : link_language, } ...
62e4f4b8262c78a20c26de7b9b23a89d2c2e1e90
examples/wsgi_app.py
examples/wsgi_app.py
import guv guv.monkey_patch() import guv.wsgi import logger logger.configure() def app(environ, start_response): status = '200 OK' output = [b'Hello World!'] content_length = str(len(b''.join(output))) response_headers = [('Content-type', 'text/plain'), ('Content-Length', content_length)] start_response(status, response_headers) return output if __name__ == '__main__': server_sock = guv.listen(('0.0.0.0', 8001)) guv.wsgi.serve(server_sock, app)
import guv guv.monkey_patch() import guv.wsgi import logger logger.configure() def app(environ, start_response): """ This is very basic WSGI app useful for testing the performance of guv and guv.wsgi without the overhead of a framework such as Flask. However, it can just as easily be any other WSGI app callable object, such as a Flask or Bottle app. """ status = '200 OK' output = [b'Hello World!'] content_length = str(len(b''.join(output))) response_headers = [('Content-type', 'text/plain'), ('Content-Length', content_length)] start_response(status, response_headers) return output if __name__ == '__main__': server_sock = guv.listen(('0.0.0.0', 8001)) guv.wsgi.serve(server_sock, app)
Add docstring to WSGI example
Add docstring to WSGI example
Python
mit
veegee/guv,veegee/guv
python
## Code Before: import guv guv.monkey_patch() import guv.wsgi import logger logger.configure() def app(environ, start_response): status = '200 OK' output = [b'Hello World!'] content_length = str(len(b''.join(output))) response_headers = [('Content-type', 'text/plain'), ('Content-Length', content_length)] start_response(status, response_headers) return output if __name__ == '__main__': server_sock = guv.listen(('0.0.0.0', 8001)) guv.wsgi.serve(server_sock, app) ## Instruction: Add docstring to WSGI example ## Code After: import guv guv.monkey_patch() import guv.wsgi import logger logger.configure() def app(environ, start_response): """ This is very basic WSGI app useful for testing the performance of guv and guv.wsgi without the overhead of a framework such as Flask. However, it can just as easily be any other WSGI app callable object, such as a Flask or Bottle app. """ status = '200 OK' output = [b'Hello World!'] content_length = str(len(b''.join(output))) response_headers = [('Content-type', 'text/plain'), ('Content-Length', content_length)] start_response(status, response_headers) return output if __name__ == '__main__': server_sock = guv.listen(('0.0.0.0', 8001)) guv.wsgi.serve(server_sock, app)
# ... existing code ... import guv guv.monkey_patch() import guv.wsgi import logger # ... modified code ... def app(environ, start_response): """ This is very basic WSGI app useful for testing the performance of guv and guv.wsgi without the overhead of a framework such as Flask. However, it can just as easily be any other WSGI app callable object, such as a Flask or Bottle app. """ status = '200 OK' output = [b'Hello World!'] content_length = str(len(b''.join(output))) # ... rest of the code ...
5eabea682317fe53d89fcf1b2ec98a1f44de51d3
rt/syslog/simpleConfig.py
rt/syslog/simpleConfig.py
hostname_patterns = [ ['KEEP', '.*'] ] path_patterns = [ ['PKGS', r'.*\/test_record_pkg'], ['SKIP', r'.*\/lua'], ['SKIP', r'.*\/expr'], ['SKIP', r'.*\/cc1'], ['SKIP', r'.*\/bash'], ['SKIP', r'.*\/collect2'], ['SKIP', r'.*\/mpich/.*'], ['SKIP', r'.*\/x86_64-linux-gnu.*'], ]
hostname_patterns = [ ['KEEP', '.*'] ] path_patterns = [ ['PKGS', r'.*\/test_record_pkg'], ['PKGS', r'.*\/python[0-9][^/][^/]*'], ['SKIP', r'.*\/lua'], ['SKIP', r'.*\/expr'], ['SKIP', r'.*\/cc1'], ['SKIP', r'.*\/bash'], ['SKIP', r'.*\/collect2'], ['SKIP', r'.*\/mpich/.*'], ['SKIP', r'.*\/x86_64-linux-gnu.*'], ] python_pkg_patterns = [ { 'k_s' : 'SKIP', 'kind' : 'path', 'patt' : r"^[^/]" }, # SKIP all built-in packages { 'k_s' : 'SKIP', 'kind' : 'name', 'patt' : r"^_" }, # SKIP names that start with a underscore { 'k_s' : 'SKIP', 'kind' : 'name', 'patt' : r".*\." }, # SKIP all names that are divided with periods: a.b.c { 'k_s' : 'KEEP', 'kind' : 'path', 'patt' : r".*/.local/" }, # KEEP all packages installed by users { 'k_s' : 'SKIP', 'kind' : 'path', 'patt' : r"/home" }, # SKIP all other packages in user locations ]
Add in python patterns for py pkg test
Add in python patterns for py pkg test
Python
lgpl-2.1
xalt/xalt,xalt/xalt,xalt/xalt,xalt/xalt,xalt/xalt,xalt/xalt,xalt/xalt
python
## Code Before: hostname_patterns = [ ['KEEP', '.*'] ] path_patterns = [ ['PKGS', r'.*\/test_record_pkg'], ['SKIP', r'.*\/lua'], ['SKIP', r'.*\/expr'], ['SKIP', r'.*\/cc1'], ['SKIP', r'.*\/bash'], ['SKIP', r'.*\/collect2'], ['SKIP', r'.*\/mpich/.*'], ['SKIP', r'.*\/x86_64-linux-gnu.*'], ] ## Instruction: Add in python patterns for py pkg test ## Code After: hostname_patterns = [ ['KEEP', '.*'] ] path_patterns = [ ['PKGS', r'.*\/test_record_pkg'], ['PKGS', r'.*\/python[0-9][^/][^/]*'], ['SKIP', r'.*\/lua'], ['SKIP', r'.*\/expr'], ['SKIP', r'.*\/cc1'], ['SKIP', r'.*\/bash'], ['SKIP', r'.*\/collect2'], ['SKIP', r'.*\/mpich/.*'], ['SKIP', r'.*\/x86_64-linux-gnu.*'], ] python_pkg_patterns = [ { 'k_s' : 'SKIP', 'kind' : 'path', 'patt' : r"^[^/]" }, # SKIP all built-in packages { 'k_s' : 'SKIP', 'kind' : 'name', 'patt' : r"^_" }, # SKIP names that start with a underscore { 'k_s' : 'SKIP', 'kind' : 'name', 'patt' : r".*\." }, # SKIP all names that are divided with periods: a.b.c { 'k_s' : 'KEEP', 'kind' : 'path', 'patt' : r".*/.local/" }, # KEEP all packages installed by users { 'k_s' : 'SKIP', 'kind' : 'path', 'patt' : r"/home" }, # SKIP all other packages in user locations ]
... ['KEEP', '.*'] ] path_patterns = [ ['PKGS', r'.*\/test_record_pkg'], ['PKGS', r'.*\/python[0-9][^/][^/]*'], ['SKIP', r'.*\/lua'], ['SKIP', r'.*\/expr'], ['SKIP', r'.*\/cc1'], ... ['SKIP', r'.*\/mpich/.*'], ['SKIP', r'.*\/x86_64-linux-gnu.*'], ] python_pkg_patterns = [ { 'k_s' : 'SKIP', 'kind' : 'path', 'patt' : r"^[^/]" }, # SKIP all built-in packages { 'k_s' : 'SKIP', 'kind' : 'name', 'patt' : r"^_" }, # SKIP names that start with a underscore { 'k_s' : 'SKIP', 'kind' : 'name', 'patt' : r".*\." }, # SKIP all names that are divided with periods: a.b.c { 'k_s' : 'KEEP', 'kind' : 'path', 'patt' : r".*/.local/" }, # KEEP all packages installed by users { 'k_s' : 'SKIP', 'kind' : 'path', 'patt' : r"/home" }, # SKIP all other packages in user locations ] ...
81246153033d38132903759cb7e33cf86c26a548
tests/test_attime.py
tests/test_attime.py
import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', '10:00', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
Make sure HH:MM values are allowed
Make sure HH:MM values are allowed
Python
apache-2.0
michaelrice/graphite-api,alphapigger/graphite-api,Knewton/graphite-api,vladimir-smirnov-sociomantic/graphite-api,hubrick/graphite-api,GeorgeJahad/graphite-api,absalon-james/graphite-api,raintank/graphite-api,winguru/graphite-api,DaveBlooman/graphite-api,absalon-james/graphite-api,alphapigger/graphite-api,raintank/graphite-api,raintank/graphite-api,rackerlabs/graphite-api,michaelrice/graphite-api,DaveBlooman/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,vladimir-smirnov-sociomantic/graphite-api,GeorgeJahad/graphite-api,brutasse/graphite-api,tpeng/graphite-api,winguru/graphite-api,cybem/graphite-api-iow,rackerlabs/graphite-api,brutasse/graphite-api,hubrick/graphite-api,bogus-py/graphite-api,tpeng/graphite-api
python
## Code Before: import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value) ## Instruction: Make sure HH:MM values are allowed ## Code After: import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', '10:00', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
// ... existing code ... 'fri', 'sat', 'sun', '10:00', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) // ... rest of the code ...
7044e24f83b3afa4a40740dd0d8c1ba3937d51f1
rsa-algorithm/src/IOOperations.java
rsa-algorithm/src/IOOperations.java
/** * Project RSA Algorithm. * Copyright Michał Szczygieł. * Created at Feb 25, 2014. */ /** * * This class is responsible for safe operations on files. This class contatin * the basic input/output operations. * * @author Michał Szczygieł <[email protected]> * */ public class IOOperations { }
import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * Project RSA Algorithm. * Copyright Michał Szczygieł. * Created at Feb 25, 2014. */ /** * * This class is responsible for safe operations on files. This class contain * the basic input/output operations. * * @author Michał Szczygieł <[email protected]> * */ public class IOOperations { /** * Object to keep instance of RSA. */ private RSA rsa; /** * * @param rsa */ public IOOperations(RSA rsa) { this.setRsa(rsa); } /** * This method gets RSA instance. * * @return the RSA instance. */ public RSA getRsa() { return rsa; } /** * This method reads file from given directory. * * @param filePath * The directory for file. * @return The {@link InputStream} for read file. * @throws IOException */ public InputStream readFile(String filePath) throws IOException { InputStream InputStream = null; File file = new File(filePath); if (file.exists()) { InputStream = new FileInputStream(file); } return InputStream; } /** * This method sets RSA instance. * * @param rsa * The RSA to set */ public void setRsa(RSA rsa) { this.rsa = rsa; } /** * This method creates output stream to write into file. * * @param filePath * The directory for file location. * @return The {@link OutputStream} for file to write. * @throws FileNotFoundException */ public OutputStream writeFile(String filePath) throws FileNotFoundException { OutputStream outputStream = null; File file = new File(filePath); if (file.exists()) { outputStream = new FileOutputStream(file); } return outputStream; } }
Add method for read and write file
Add method for read and write file
Java
mit
M4GiK/tosi-projects,M4GiK/tosi-projects
java
## Code Before: /** * Project RSA Algorithm. * Copyright Michał Szczygieł. * Created at Feb 25, 2014. */ /** * * This class is responsible for safe operations on files. This class contatin * the basic input/output operations. * * @author Michał Szczygieł <[email protected]> * */ public class IOOperations { } ## Instruction: Add method for read and write file ## Code After: import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * Project RSA Algorithm. * Copyright Michał Szczygieł. * Created at Feb 25, 2014. */ /** * * This class is responsible for safe operations on files. This class contain * the basic input/output operations. * * @author Michał Szczygieł <[email protected]> * */ public class IOOperations { /** * Object to keep instance of RSA. */ private RSA rsa; /** * * @param rsa */ public IOOperations(RSA rsa) { this.setRsa(rsa); } /** * This method gets RSA instance. * * @return the RSA instance. */ public RSA getRsa() { return rsa; } /** * This method reads file from given directory. * * @param filePath * The directory for file. * @return The {@link InputStream} for read file. * @throws IOException */ public InputStream readFile(String filePath) throws IOException { InputStream InputStream = null; File file = new File(filePath); if (file.exists()) { InputStream = new FileInputStream(file); } return InputStream; } /** * This method sets RSA instance. * * @param rsa * The RSA to set */ public void setRsa(RSA rsa) { this.rsa = rsa; } /** * This method creates output stream to write into file. * * @param filePath * The directory for file location. * @return The {@link OutputStream} for file to write. * @throws FileNotFoundException */ public OutputStream writeFile(String filePath) throws FileNotFoundException { OutputStream outputStream = null; File file = new File(filePath); if (file.exists()) { outputStream = new FileOutputStream(file); } return outputStream; } }
# ... existing code ... import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * Project RSA Algorithm. * Copyright Michał Szczygieł. # ... modified code ... /** * * This class is responsible for safe operations on files. This class contain * the basic input/output operations. * * @author Michał Szczygieł <[email protected]> ... */ public class IOOperations { /** * Object to keep instance of RSA. */ private RSA rsa; /** * * @param rsa */ public IOOperations(RSA rsa) { this.setRsa(rsa); } /** * This method gets RSA instance. * * @return the RSA instance. */ public RSA getRsa() { return rsa; } /** * This method reads file from given directory. * * @param filePath * The directory for file. * @return The {@link InputStream} for read file. * @throws IOException */ public InputStream readFile(String filePath) throws IOException { InputStream InputStream = null; File file = new File(filePath); if (file.exists()) { InputStream = new FileInputStream(file); } return InputStream; } /** * This method sets RSA instance. * * @param rsa * The RSA to set */ public void setRsa(RSA rsa) { this.rsa = rsa; } /** * This method creates output stream to write into file. * * @param filePath * The directory for file location. * @return The {@link OutputStream} for file to write. * @throws FileNotFoundException */ public OutputStream writeFile(String filePath) throws FileNotFoundException { OutputStream outputStream = null; File file = new File(filePath); if (file.exists()) { outputStream = new FileOutputStream(file); } return outputStream; } } # ... rest of the code ...
c7456edf3bf0056a9f807c5cc6925533a9ed4c2a
src/main/java/ca/earthgrazer/codereviewer/service/ReviewManagementServiceImpl.java
src/main/java/ca/earthgrazer/codereviewer/service/ReviewManagementServiceImpl.java
package ca.earthgrazer.codereviewer.service; import java.util.HashMap; import java.util.List; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import ca.earthgrazer.codereviewer.model.ReviewFile; import ca.earthgrazer.codereviewer.repository.ReviewRepository; @Service public class ReviewManagementServiceImpl implements ReviewManagementService { @Autowired private ReviewRepository reviewRepo; Map<String, List<ReviewFile>> reviewMap = new HashMap<>(); @Override public String createReviewUnit(List<ReviewFile> fileList) { return reviewRepo.createReviewUnit(fileList); } @Override public List<ReviewFile> getReviewUnit(String refId) { return reviewRepo.getReviewUnit(refId); } }
package ca.earthgrazer.codereviewer.service; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import ca.earthgrazer.codereviewer.model.ReviewFile; import ca.earthgrazer.codereviewer.repository.ReviewRepository; @Service public class ReviewManagementServiceImpl implements ReviewManagementService { @Autowired private ReviewRepository reviewRepo; Map<String, List<ReviewFile>> reviewMap = new HashMap<>(); @Override public String createReviewUnit(List<ReviewFile> fileList) { // filter out empty files List<ReviewFile> filteredList = fileList.stream() .filter(f -> !f.fileName.isEmpty() || !f.fileContent.isEmpty() || !f.fileDiff.isEmpty() || !f.fileComment.isEmpty()) .collect(Collectors.toList()); return reviewRepo.createReviewUnit(filteredList); } @Override public List<ReviewFile> getReviewUnit(String refId) { return reviewRepo.getReviewUnit(refId); } }
Add filtering for empty review files
Add filtering for empty review files Resolves issue #4
Java
apache-2.0
earthgrazer/codereviewer,earthgrazer/codereviewer,earthgrazer/codereviewer
java
## Code Before: package ca.earthgrazer.codereviewer.service; import java.util.HashMap; import java.util.List; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import ca.earthgrazer.codereviewer.model.ReviewFile; import ca.earthgrazer.codereviewer.repository.ReviewRepository; @Service public class ReviewManagementServiceImpl implements ReviewManagementService { @Autowired private ReviewRepository reviewRepo; Map<String, List<ReviewFile>> reviewMap = new HashMap<>(); @Override public String createReviewUnit(List<ReviewFile> fileList) { return reviewRepo.createReviewUnit(fileList); } @Override public List<ReviewFile> getReviewUnit(String refId) { return reviewRepo.getReviewUnit(refId); } } ## Instruction: Add filtering for empty review files Resolves issue #4 ## Code After: package ca.earthgrazer.codereviewer.service; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import ca.earthgrazer.codereviewer.model.ReviewFile; import ca.earthgrazer.codereviewer.repository.ReviewRepository; @Service public class ReviewManagementServiceImpl implements ReviewManagementService { @Autowired private ReviewRepository reviewRepo; Map<String, List<ReviewFile>> reviewMap = new HashMap<>(); @Override public String createReviewUnit(List<ReviewFile> fileList) { // filter out empty files List<ReviewFile> filteredList = fileList.stream() .filter(f -> !f.fileName.isEmpty() || !f.fileContent.isEmpty() || !f.fileDiff.isEmpty() || !f.fileComment.isEmpty()) .collect(Collectors.toList()); return reviewRepo.createReviewUnit(filteredList); } @Override public List<ReviewFile> getReviewUnit(String refId) { return reviewRepo.getReviewUnit(refId); } }
# ... existing code ... import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; # ... modified code ... @Override public String createReviewUnit(List<ReviewFile> fileList) { // filter out empty files List<ReviewFile> filteredList = fileList.stream() .filter(f -> !f.fileName.isEmpty() || !f.fileContent.isEmpty() || !f.fileDiff.isEmpty() || !f.fileComment.isEmpty()) .collect(Collectors.toList()); return reviewRepo.createReviewUnit(filteredList); } @Override # ... rest of the code ...
1a3c1576138400b28ca7093a842dc7d044d5892b
MKFirebaseObjectMapping/Classes/MKFirebaseModel.h
MKFirebaseObjectMapping/Classes/MKFirebaseModel.h
// // Created by Michael Kuck on 7/7/16. // Copyright (c) 2016 Michael Kuck. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN @class FIRDatabaseReference; @class FIRDataSnapshot; //============================================================ //== Public Interface //============================================================ @interface MKFirebaseModel : NSObject @property (nonatomic, readonly) FIRDatabaseReference *firebaseRef; @property (nonatomic, readonly) NSString *identifier; - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithFirebaseRef:(FIRDatabaseReference *)firebaseRef snapshotValue:(NSDictionary *)snapshotValue NS_DESIGNATED_INITIALIZER; - (instancetype)initWithSnapshot:(FIRDataSnapshot *)snapshot; @end NS_ASSUME_NONNULL_END
// // Created by Michael Kuck on 7/7/16. // Copyright (c) 2016 Michael Kuck. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN @class FIRDatabaseReference; @class FIRDataSnapshot; //============================================================ //== Public Interface //============================================================ @interface MKFirebaseModel : NSObject @property (nonatomic, readonly) FIRDatabaseReference *firebaseRef; @property (nonatomic, readonly) NSString *identifier; - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithFirebaseRef:(FIRDatabaseReference *)firebaseRef snapshotValue:(NSDictionary *)snapshotValue NS_DESIGNATED_INITIALIZER; - (instancetype)initWithSnapshot:(FIRDataSnapshot *)snapshot; - (BOOL)isEqualToFirebaseModel:(MKFirebaseModel *)firebaseModel; @end NS_ASSUME_NONNULL_END
Add `isEqualToFirebaseModel:` to public header
Add `isEqualToFirebaseModel:` to public header
C
mit
mikumi/MKFirebaseObjectMapping,mikumi/MKFirebaseObjectMapping,mikumi/MKFirebaseObjectMapping
c
## Code Before: // // Created by Michael Kuck on 7/7/16. // Copyright (c) 2016 Michael Kuck. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN @class FIRDatabaseReference; @class FIRDataSnapshot; //============================================================ //== Public Interface //============================================================ @interface MKFirebaseModel : NSObject @property (nonatomic, readonly) FIRDatabaseReference *firebaseRef; @property (nonatomic, readonly) NSString *identifier; - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithFirebaseRef:(FIRDatabaseReference *)firebaseRef snapshotValue:(NSDictionary *)snapshotValue NS_DESIGNATED_INITIALIZER; - (instancetype)initWithSnapshot:(FIRDataSnapshot *)snapshot; @end NS_ASSUME_NONNULL_END ## Instruction: Add `isEqualToFirebaseModel:` to public header ## Code After: // // Created by Michael Kuck on 7/7/16. // Copyright (c) 2016 Michael Kuck. All rights reserved. // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN @class FIRDatabaseReference; @class FIRDataSnapshot; //============================================================ //== Public Interface //============================================================ @interface MKFirebaseModel : NSObject @property (nonatomic, readonly) FIRDatabaseReference *firebaseRef; @property (nonatomic, readonly) NSString *identifier; - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithFirebaseRef:(FIRDatabaseReference *)firebaseRef snapshotValue:(NSDictionary *)snapshotValue NS_DESIGNATED_INITIALIZER; - (instancetype)initWithSnapshot:(FIRDataSnapshot *)snapshot; - (BOOL)isEqualToFirebaseModel:(MKFirebaseModel *)firebaseModel; @end NS_ASSUME_NONNULL_END
# ... existing code ... - (instancetype)initWithFirebaseRef:(FIRDatabaseReference *)firebaseRef snapshotValue:(NSDictionary *)snapshotValue NS_DESIGNATED_INITIALIZER; - (instancetype)initWithSnapshot:(FIRDataSnapshot *)snapshot; - (BOOL)isEqualToFirebaseModel:(MKFirebaseModel *)firebaseModel; @end NS_ASSUME_NONNULL_END # ... rest of the code ...
70f69f7b801404f7091e91b6ed997602709f9f42
commands/globaladd.py
commands/globaladd.py
from devbot import chat def call(message: str, name, protocol, cfg, commands): if message is '': chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd'))) return if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say_wrap('/msg {}'.format(message), 'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.') chat.say('/nlip GlobalChat {}'.format(message))
from devbot import chat def call(message: str, name, protocol, cfg, commands): if message is '': chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd'))) return if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say( '/msg {} You have been invited to global chat. Use /g GlobalChat to enter, and /e to exit.'.format( message)) chat.say('/nlip GlobalChat {}'.format(message))
Fix gadd not sending tutorial
Fix gadd not sending tutorial
Python
mit
Ameliorate/DevotedBot,Ameliorate/DevotedBot
python
## Code Before: from devbot import chat def call(message: str, name, protocol, cfg, commands): if message is '': chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd'))) return if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say_wrap('/msg {}'.format(message), 'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.') chat.say('/nlip GlobalChat {}'.format(message)) ## Instruction: Fix gadd not sending tutorial ## Code After: from devbot import chat def call(message: str, name, protocol, cfg, commands): if message is '': chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd'))) return if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say( '/msg {} You have been invited to global chat. Use /g GlobalChat to enter, and /e to exit.'.format( message)) chat.say('/nlip GlobalChat {}'.format(message))
... def call(message: str, name, protocol, cfg, commands): if message is '': chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd'))) return if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say( '/msg {} You have been invited to global chat. Use /g GlobalChat to enter, and /e to exit.'.format( message)) chat.say('/nlip GlobalChat {}'.format(message)) ...
6f356a94c56053b47fb38670a93e04f46740f21e
tartpy/eventloop.py
tartpy/eventloop.py
import asyncio import queue import sched import threading import time from .singleton import Singleton class EventLoop(object, metaclass=Singleton): def __init__(self): self.loop = asyncio.get_event_loop() self.do = self.sync_do def sync_do(self, f, *args, **kwargs): f(*args, **kwargs) def thread_do(self, f, *args, **kwargs): self.loop.call_soon_threadsafe(f, *args, **kwargs) def schedule(self, target, event): self.do(self.loop.call_soon, event) def later(self, delay, event): self.do(self.loop.call_later, delay, event) def run(self): self.do = self.sync_do self.loop.run_forever() def run_once(self): self.stop_later() self.run() def run_in_thread(self): self.do = self.thread_do self.thread = threading.Thread(target=self.loop.run_forever, name='asyncio_event_loop') self.thread.daemon = True self.thread.start() def stop(self): self.loop.stop() def stop_later(self): self.do = self.sync_do self.schedule(self, self.stop)
import asyncio import queue import sched import threading import time from .singleton import Singleton class EventLoop(object, metaclass=Singleton): def __init__(self): self.loop = asyncio.get_event_loop() self.do = self.sync_do def sync_do(self, f, *args, **kwargs): f(*args, **kwargs) def thread_do(self, f, *args, **kwargs): self.loop.call_soon_threadsafe(f, *args, **kwargs) def schedule(self, target, event): self.do(self.loop.call_soon, event) def later(self, delay, event): self.do(self.loop.call_later, delay, event) def run(self): self.do = self.sync_do self.loop.run_forever() def run_once(self): self.stop_later() self.run() def run_in_thread(self): self.do = self.thread_do self.thread = threading.Thread(target=self.loop.run_forever, name='asyncio_event_loop') self.thread.daemon = True self.thread.start() def stop(self): self.thread_do(self.loop.stop) def stop_later(self): self.do = self.sync_do self.schedule(self, self.stop)
Make sure that 'stop' works from everywhere
Make sure that 'stop' works from everywhere
Python
mit
waltermoreira/tartpy
python
## Code Before: import asyncio import queue import sched import threading import time from .singleton import Singleton class EventLoop(object, metaclass=Singleton): def __init__(self): self.loop = asyncio.get_event_loop() self.do = self.sync_do def sync_do(self, f, *args, **kwargs): f(*args, **kwargs) def thread_do(self, f, *args, **kwargs): self.loop.call_soon_threadsafe(f, *args, **kwargs) def schedule(self, target, event): self.do(self.loop.call_soon, event) def later(self, delay, event): self.do(self.loop.call_later, delay, event) def run(self): self.do = self.sync_do self.loop.run_forever() def run_once(self): self.stop_later() self.run() def run_in_thread(self): self.do = self.thread_do self.thread = threading.Thread(target=self.loop.run_forever, name='asyncio_event_loop') self.thread.daemon = True self.thread.start() def stop(self): self.loop.stop() def stop_later(self): self.do = self.sync_do self.schedule(self, self.stop) ## Instruction: Make sure that 'stop' works from everywhere ## Code After: import asyncio import queue import sched import threading import time from .singleton import Singleton class EventLoop(object, metaclass=Singleton): def __init__(self): self.loop = asyncio.get_event_loop() self.do = self.sync_do def sync_do(self, f, *args, **kwargs): f(*args, **kwargs) def thread_do(self, f, *args, **kwargs): self.loop.call_soon_threadsafe(f, *args, **kwargs) def schedule(self, target, event): self.do(self.loop.call_soon, event) def later(self, delay, event): self.do(self.loop.call_later, delay, event) def run(self): self.do = self.sync_do self.loop.run_forever() def run_once(self): self.stop_later() self.run() def run_in_thread(self): self.do = self.thread_do self.thread = threading.Thread(target=self.loop.run_forever, name='asyncio_event_loop') self.thread.daemon = True self.thread.start() def stop(self): self.thread_do(self.loop.stop) def stop_later(self): self.do = self.sync_do self.schedule(self, self.stop)
// ... existing code ... self.thread.start() def stop(self): self.thread_do(self.loop.stop) def stop_later(self): self.do = self.sync_do // ... rest of the code ...
740513cc6ee8a9fcb0f1aa08944d1152c6163099
src/main/java/at/ac/tuwien/inso/entity/SemesterType.java
src/main/java/at/ac/tuwien/inso/entity/SemesterType.java
package at.ac.tuwien.inso.entity; public enum SemesterType { WinterSemester("WS"), SummerSemester("SS"); private final String name; SemesterType(String name) { this.name = name; } @Override public String toString() { return name; } /** * Reverse of toString */ public static SemesterType fromString(String name) { for (SemesterType type : SemesterType.values()) { if (type.toString().equals(name)) { return type; } } throw new IllegalArgumentException("Type '" + name + "' is not a valid SemesterType"); } }
package at.ac.tuwien.inso.entity; public enum SemesterType { WinterSemester("WS", 10, 1), SummerSemester("SS", 3, 1); /** * Name of the semester: WS or SS */ private final String name; /** * Month the semester starts */ private final int startMonth; /** * Day in month the semester starts */ private final int startDay; SemesterType(String name, int startMonth, int startDay) { this.name = name; this.startMonth = startMonth; this.startDay = startDay; } public int getStartMonth() { return startMonth; } public int getStartDay() { return startDay; } @Override public String toString() { return name; } /** * Reverse of toString */ public static SemesterType fromString(String name) { for (SemesterType type : SemesterType.values()) { if (type.toString().equals(name)) { return type; } } throw new IllegalArgumentException("Type '" + name + "' is not a valid SemesterType"); } }
Add start dates to Semester Types
Add start dates to Semester Types
Java
mit
university-information-system/uis,university-information-system/uis,university-information-system/uis,university-information-system/uis
java
## Code Before: package at.ac.tuwien.inso.entity; public enum SemesterType { WinterSemester("WS"), SummerSemester("SS"); private final String name; SemesterType(String name) { this.name = name; } @Override public String toString() { return name; } /** * Reverse of toString */ public static SemesterType fromString(String name) { for (SemesterType type : SemesterType.values()) { if (type.toString().equals(name)) { return type; } } throw new IllegalArgumentException("Type '" + name + "' is not a valid SemesterType"); } } ## Instruction: Add start dates to Semester Types ## Code After: package at.ac.tuwien.inso.entity; public enum SemesterType { WinterSemester("WS", 10, 1), SummerSemester("SS", 3, 1); /** * Name of the semester: WS or SS */ private final String name; /** * Month the semester starts */ private final int startMonth; /** * Day in month the semester starts */ private final int startDay; SemesterType(String name, int startMonth, int startDay) { this.name = name; this.startMonth = startMonth; this.startDay = startDay; } public int getStartMonth() { return startMonth; } public int getStartDay() { return startDay; } @Override public String toString() { return name; } /** * Reverse of toString */ public static SemesterType fromString(String name) { for (SemesterType type : SemesterType.values()) { if (type.toString().equals(name)) { return type; } } throw new IllegalArgumentException("Type '" + name + "' is not a valid SemesterType"); } }
// ... existing code ... package at.ac.tuwien.inso.entity; public enum SemesterType { WinterSemester("WS", 10, 1), SummerSemester("SS", 3, 1); /** * Name of the semester: WS or SS */ private final String name; /** * Month the semester starts */ private final int startMonth; /** * Day in month the semester starts */ private final int startDay; SemesterType(String name, int startMonth, int startDay) { this.name = name; this.startMonth = startMonth; this.startDay = startDay; } public int getStartMonth() { return startMonth; } public int getStartDay() { return startDay; } @Override // ... rest of the code ...
d500e290f8c1422f74b1d8c8d2bbb8ec9e5529cb
misc/singleton.py
misc/singleton.py
class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. """ _instances = {} @classmethod def instance(cls): if cls not in cls._instances: cls._instances[cls] = cls() return cls._instances[cls]
class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. Classes that use this must: 1) Add Singleton as a superclass. 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod def instance(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = cls(*args, **kwargs) return cls._instances[cls]
Add comments to Singleton about usage.
Add comments to Singleton about usage.
Python
mit
dpazel/music_rep
python
## Code Before: class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. """ _instances = {} @classmethod def instance(cls): if cls not in cls._instances: cls._instances[cls] = cls() return cls._instances[cls] ## Instruction: Add comments to Singleton about usage. ## Code After: class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. Classes that use this must: 1) Add Singleton as a superclass. 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod def instance(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = cls(*args, **kwargs) return cls._instances[cls]
... class Singleton(object): """ ... The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. Classes that use this must: 1) Add Singleton as a superclass. 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod def instance(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = cls(*args, **kwargs) return cls._instances[cls] ...
3ceba413c57eec2034fb02e8a5557e69cf54a415
litslist/commands.py
litslist/commands.py
import os import csv import random def run_create(count): file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)] sets = {} for filename in file_list: content = open(os.path.join(os.curdir, filename)).read().split('\n') random.shuffle(content) sets[filename[:filename.index('.')].title()] = content if not os.path.exists('Lists'): os.mkdir('Lists') for i in xrange(count): try: f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w') file_content = '' for category, category_list in sets.items(): file_content += (category + ' -- ' + category_list[i] + '\n') f.write(file_content) f.close() except IndexError: break_point = i break return
import os import csv import random def run_create(count): file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)] sets = {} for filename in file_list: content = open(os.path.join(os.curdir, filename)).read().split('\n') random.shuffle(content) sets[filename[:filename.index('.')].title()] = content if not os.path.exists('Lists'): os.mkdir('Lists') break_point = count for i in xrange(count): try: f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w') file_content = '' for category, category_list in sets.items(): file_content += (category + ' -- ' + category_list[i] + '\n') f.write(file_content) f.close() except IndexError: break_point = i break try: print sets for category, category_list in sets.items(): f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w') file_content = "\n".join(category_list[break_point:]) f.write(file_content) f.close() except NameError, e: print e pass return
Set up creating files for unused items
Set up creating files for unused items
Python
mit
AlexMathew/litslist
python
## Code Before: import os import csv import random def run_create(count): file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)] sets = {} for filename in file_list: content = open(os.path.join(os.curdir, filename)).read().split('\n') random.shuffle(content) sets[filename[:filename.index('.')].title()] = content if not os.path.exists('Lists'): os.mkdir('Lists') for i in xrange(count): try: f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w') file_content = '' for category, category_list in sets.items(): file_content += (category + ' -- ' + category_list[i] + '\n') f.write(file_content) f.close() except IndexError: break_point = i break return ## Instruction: Set up creating files for unused items ## Code After: import os import csv import random def run_create(count): file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)] sets = {} for filename in file_list: content = open(os.path.join(os.curdir, filename)).read().split('\n') random.shuffle(content) sets[filename[:filename.index('.')].title()] = content if not os.path.exists('Lists'): os.mkdir('Lists') break_point = count for i in xrange(count): try: f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w') file_content = '' for category, category_list in sets.items(): file_content += (category + ' -- ' + category_list[i] + '\n') f.write(file_content) f.close() except IndexError: break_point = i break try: print sets for category, category_list in sets.items(): f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w') file_content = "\n".join(category_list[break_point:]) f.write(file_content) f.close() except NameError, e: print e pass return
# ... existing code ... sets[filename[:filename.index('.')].title()] = content if not os.path.exists('Lists'): os.mkdir('Lists') break_point = count for i in xrange(count): try: f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w') # ... modified code ... except IndexError: break_point = i break try: print sets for category, category_list in sets.items(): f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w') file_content = "\n".join(category_list[break_point:]) f.write(file_content) f.close() except NameError, e: print e pass return # ... rest of the code ...
2af15d2a6ec832a8b0325545c0c3766428cf7dd2
src/main/java/de/qabel/ackack/MessageInfo.java
src/main/java/de/qabel/ackack/MessageInfo.java
package de.qabel.ackack; /** * Created by tox on 11/25/14. */ public class MessageInfo implements Cloneable { private Actor sender; private long time; private String type; public Actor getSender() { return sender; } public long getTime() { return time; } public String getType() { return type; } public void setSender(Actor sender) { this.sender = sender; } public void setTime(long time) { this.time = time; } public void setType(String type) { this.type = type; } public void answer(final Object... data) { this.sender.post(this, data); } }
package de.qabel.ackack; /** * Created by tox on 11/25/14. */ public class MessageInfo implements Cloneable { private Actor sender; private long time; private String type; public Actor getSender() { return sender; } public long getTime() { return time; } public String getType() { return type; } public void setSender(Actor sender) { this.sender = sender; } public void setTime(long time) { this.time = time; } public void setType(String type) { this.type = type; } public void answer(final Object... data) { if (this.sender != null) { this.sender.post(this, data); } } }
Check whether sender is set
Check whether sender is set
Java
mit
Qabel/ackack
java
## Code Before: package de.qabel.ackack; /** * Created by tox on 11/25/14. */ public class MessageInfo implements Cloneable { private Actor sender; private long time; private String type; public Actor getSender() { return sender; } public long getTime() { return time; } public String getType() { return type; } public void setSender(Actor sender) { this.sender = sender; } public void setTime(long time) { this.time = time; } public void setType(String type) { this.type = type; } public void answer(final Object... data) { this.sender.post(this, data); } } ## Instruction: Check whether sender is set ## Code After: package de.qabel.ackack; /** * Created by tox on 11/25/14. */ public class MessageInfo implements Cloneable { private Actor sender; private long time; private String type; public Actor getSender() { return sender; } public long getTime() { return time; } public String getType() { return type; } public void setSender(Actor sender) { this.sender = sender; } public void setTime(long time) { this.time = time; } public void setType(String type) { this.type = type; } public void answer(final Object... data) { if (this.sender != null) { this.sender.post(this, data); } } }
... } public void answer(final Object... data) { if (this.sender != null) { this.sender.post(this, data); } } } ...
7039dd833186ba8430aae55de3e856ac0426f90c
examples/rust_with_cffi/setup.py
examples/rust_with_cffi/setup.py
import platform import sys from setuptools import setup from setuptools_rust import RustExtension setup( name="rust-with-cffi", version="0.1.0", classifiers=[ "License :: OSI Approved :: MIT License", "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Rust", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", ], packages=["rust_with_cffi"], rust_extensions=[ RustExtension("rust_with_cffi.rust"), ], cffi_modules=["cffi_module.py:ffi"], install_requires=["cffi"], setup_requires=["cffi"], include_package_data=True, zip_safe=False, )
import platform import sys from setuptools import setup from setuptools_rust import RustExtension setup( name="rust-with-cffi", version="0.1.0", classifiers=[ "License :: OSI Approved :: MIT License", "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Rust", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", ], packages=["rust_with_cffi"], rust_extensions=[ RustExtension("rust_with_cffi.rust", py_limited_api="auto"), ], cffi_modules=["cffi_module.py:ffi"], install_requires=["cffi"], setup_requires=["cffi"], include_package_data=True, zip_safe=False, )
Use py_limited_api="auto" in rust_with_cffi example
Use py_limited_api="auto" in rust_with_cffi example
Python
mit
PyO3/setuptools-rust,PyO3/setuptools-rust
python
## Code Before: import platform import sys from setuptools import setup from setuptools_rust import RustExtension setup( name="rust-with-cffi", version="0.1.0", classifiers=[ "License :: OSI Approved :: MIT License", "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Rust", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", ], packages=["rust_with_cffi"], rust_extensions=[ RustExtension("rust_with_cffi.rust"), ], cffi_modules=["cffi_module.py:ffi"], install_requires=["cffi"], setup_requires=["cffi"], include_package_data=True, zip_safe=False, ) ## Instruction: Use py_limited_api="auto" in rust_with_cffi example ## Code After: import platform import sys from setuptools import setup from setuptools_rust import RustExtension setup( name="rust-with-cffi", version="0.1.0", classifiers=[ "License :: OSI Approved :: MIT License", "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Rust", "Operating System :: POSIX", "Operating System :: MacOS :: MacOS X", ], packages=["rust_with_cffi"], rust_extensions=[ RustExtension("rust_with_cffi.rust", py_limited_api="auto"), ], cffi_modules=["cffi_module.py:ffi"], install_requires=["cffi"], setup_requires=["cffi"], include_package_data=True, zip_safe=False, )
... ], packages=["rust_with_cffi"], rust_extensions=[ RustExtension("rust_with_cffi.rust", py_limited_api="auto"), ], cffi_modules=["cffi_module.py:ffi"], install_requires=["cffi"], ...
294940e3e8b4b10a273a7e1697eeb047d8c32036
easylog-jee/src/main/java/easylog/jee/Easylog.java
easylog-jee/src/main/java/easylog/jee/Easylog.java
package easylog.jee; import javax.interceptor.InterceptorBinding; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; @Inherited @InterceptorBinding @Retention(RUNTIME) @Target(TYPE) public @interface Easylog { }
package easylog.jee; import javax.interceptor.InterceptorBinding; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; @Inherited @InterceptorBinding @Retention(RUNTIME) @Target({TYPE, METHOD}) public @interface Easylog { }
Allow interceptor annotation to be placed on methods.
Allow interceptor annotation to be placed on methods.
Java
mit
epyon81/easylog
java
## Code Before: package easylog.jee; import javax.interceptor.InterceptorBinding; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; @Inherited @InterceptorBinding @Retention(RUNTIME) @Target(TYPE) public @interface Easylog { } ## Instruction: Allow interceptor annotation to be placed on methods. ## Code After: package easylog.jee; import javax.interceptor.InterceptorBinding; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; @Inherited @InterceptorBinding @Retention(RUNTIME) @Target({TYPE, METHOD}) public @interface Easylog { }
// ... existing code ... import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; // ... modified code ... @Inherited @InterceptorBinding @Retention(RUNTIME) @Target({TYPE, METHOD}) public @interface Easylog { } // ... rest of the code ...
fd96f851dcb470dd3e5a912b84bb42384f9f291a
com.mindoo.domino.jna.xsp/src/com/mindoo/domino/jna/xsp/internal/DominoJNAActivator.java
com.mindoo.domino.jna.xsp/src/com/mindoo/domino/jna/xsp/internal/DominoJNAActivator.java
package com.mindoo.domino.jna.xsp.internal; import org.eclipse.core.runtime.Plugin; import org.osgi.framework.BundleContext; public class DominoJNAActivator extends Plugin { public static final String PLUGIN_ID = "com.mindoo.domino.jna.xsp"; private static DominoJNAActivator plugin; @Override public void start(BundleContext context) throws Exception { super.start(context); plugin = this; } @Override public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } public static DominoJNAActivator getDefault() { return plugin; } }
package com.mindoo.domino.jna.xsp.internal; import java.security.AccessController; import java.security.PrivilegedAction; import org.eclipse.core.runtime.Plugin; import org.osgi.framework.BundleContext; public class DominoJNAActivator extends Plugin { public static final String PLUGIN_ID = "com.mindoo.domino.jna.xsp"; private static DominoJNAActivator plugin; private static Class m_jnaNativeClazz; @Override public void start(BundleContext context) throws Exception { super.start(context); plugin = this; if (m_jnaNativeClazz==null) { m_jnaNativeClazz = AccessController.doPrivileged(new PrivilegedAction<Class>() { @Override public Class run() { //enforce using the extracted JNA .dll/.so file instead of what we find on the PATH System.setProperty("jna.nosys", "true"); //change the library name from the default "jnidispatch" to our own name, so that //JNA does not load an jnidispatcher.dll from the Server's program directory String oldLibName = System.getProperty("jna.boot.library.name"); System.setProperty("jna.boot.library.name", "dominojnadispatch"); try { //loading the Native class runs its static code that extracts and loads the jna dll return DominoJNAActivator.class.forName("com.sun.jna.Native"); } catch (ClassNotFoundException e) { e.printStackTrace(); } finally { if (oldLibName!=null) System.setProperty("jna.boot.library.name",oldLibName); } return null; } }); } } @Override public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } public static DominoJNAActivator getDefault() { return plugin; } }
Work around dll version conflicts w/ jnidispatch.dll in Domino progdir
Work around dll version conflicts w/ jnidispatch.dll in Domino progdir
Java
apache-2.0
klehmann/domino-jna
java
## Code Before: package com.mindoo.domino.jna.xsp.internal; import org.eclipse.core.runtime.Plugin; import org.osgi.framework.BundleContext; public class DominoJNAActivator extends Plugin { public static final String PLUGIN_ID = "com.mindoo.domino.jna.xsp"; private static DominoJNAActivator plugin; @Override public void start(BundleContext context) throws Exception { super.start(context); plugin = this; } @Override public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } public static DominoJNAActivator getDefault() { return plugin; } } ## Instruction: Work around dll version conflicts w/ jnidispatch.dll in Domino progdir ## Code After: package com.mindoo.domino.jna.xsp.internal; import java.security.AccessController; import java.security.PrivilegedAction; import org.eclipse.core.runtime.Plugin; import org.osgi.framework.BundleContext; public class DominoJNAActivator extends Plugin { public static final String PLUGIN_ID = "com.mindoo.domino.jna.xsp"; private static DominoJNAActivator plugin; private static Class m_jnaNativeClazz; @Override public void start(BundleContext context) throws Exception { super.start(context); plugin = this; if (m_jnaNativeClazz==null) { m_jnaNativeClazz = AccessController.doPrivileged(new PrivilegedAction<Class>() { @Override public Class run() { //enforce using the extracted JNA .dll/.so file instead of what we find on the PATH System.setProperty("jna.nosys", "true"); //change the library name from the default "jnidispatch" to our own name, so that //JNA does not load an jnidispatcher.dll from the Server's program directory String oldLibName = System.getProperty("jna.boot.library.name"); System.setProperty("jna.boot.library.name", "dominojnadispatch"); try { //loading the Native class runs its static code that extracts and loads the jna dll return DominoJNAActivator.class.forName("com.sun.jna.Native"); } catch (ClassNotFoundException e) { e.printStackTrace(); } finally { if (oldLibName!=null) System.setProperty("jna.boot.library.name",oldLibName); } return null; } }); } } @Override public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } public static DominoJNAActivator getDefault() { return plugin; } }
# ... existing code ... package com.mindoo.domino.jna.xsp.internal; import java.security.AccessController; import java.security.PrivilegedAction; import org.eclipse.core.runtime.Plugin; import org.osgi.framework.BundleContext; # ... modified code ... public static final String PLUGIN_ID = "com.mindoo.domino.jna.xsp"; private static DominoJNAActivator plugin; private static Class m_jnaNativeClazz; @Override public void start(BundleContext context) throws Exception { super.start(context); plugin = this; if (m_jnaNativeClazz==null) { m_jnaNativeClazz = AccessController.doPrivileged(new PrivilegedAction<Class>() { @Override public Class run() { //enforce using the extracted JNA .dll/.so file instead of what we find on the PATH System.setProperty("jna.nosys", "true"); //change the library name from the default "jnidispatch" to our own name, so that //JNA does not load an jnidispatcher.dll from the Server's program directory String oldLibName = System.getProperty("jna.boot.library.name"); System.setProperty("jna.boot.library.name", "dominojnadispatch"); try { //loading the Native class runs its static code that extracts and loads the jna dll return DominoJNAActivator.class.forName("com.sun.jna.Native"); } catch (ClassNotFoundException e) { e.printStackTrace(); } finally { if (oldLibName!=null) System.setProperty("jna.boot.library.name",oldLibName); } return null; } }); } } @Override # ... rest of the code ...
88cddb85016ba3609ec85a4e807a70fb85e56a59
src/main/java/uk/org/rbc1b/roms/controller/person/PersonsController.java
src/main/java/uk/org/rbc1b/roms/controller/person/PersonsController.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package uk.org.rbc1b.roms.controller.person; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import uk.org.rbc1b.roms.db.Person; /** * Control access to the underlying person data. * * @author oliver */ @Controller @RequestMapping("/persons") public class PersonsController { @Autowired private PersonDao personDao; /** * Person search. Pass in a candidate, match this against the user * first/last name and return the person object in JSON format * * @param query person match lookup * @return model containing the list of qualifications */ @RequestMapping(value = "search", method = RequestMethod.GET, headers = "Accept=application/json") //@PreAuthorize - not clear who will not be allowed to access @Transactional(readOnly = true) public List<Person> handleList(@PathVariable String query) { return personDao.findPersons(query); } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package uk.org.rbc1b.roms.controller.person; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import uk.org.rbc1b.roms.db.Person; /** * Control access to the underlying person data. * * @author oliver */ @Controller @RequestMapping("/persons") public class PersonsController { @Autowired private PersonDao personDao; /** * Person search. Pass in a candidate, match this against the user * first/last name and return the person object in JSON format * * @param query person match lookup * @return model containing the list of people */ @RequestMapping(value = "search", method = RequestMethod.GET, headers = "Accept=application/json") //@PreAuthorize - not clear who will not be allowed to access @Transactional(readOnly = true) @ResponseBody public List<Person> handleList(@RequestParam String query) { return personDao.findPersons(query); } }
Correct controller for person look up (for type ahead). It returns the full list of objects matching the person
Correct controller for person look up (for type ahead). It returns the full list of objects matching the person
Java
mit
RBC1B/ROMS,RBC1B/ROMS,RBC1B/ROMS
java
## Code Before: /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package uk.org.rbc1b.roms.controller.person; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import uk.org.rbc1b.roms.db.Person; /** * Control access to the underlying person data. * * @author oliver */ @Controller @RequestMapping("/persons") public class PersonsController { @Autowired private PersonDao personDao; /** * Person search. Pass in a candidate, match this against the user * first/last name and return the person object in JSON format * * @param query person match lookup * @return model containing the list of qualifications */ @RequestMapping(value = "search", method = RequestMethod.GET, headers = "Accept=application/json") //@PreAuthorize - not clear who will not be allowed to access @Transactional(readOnly = true) public List<Person> handleList(@PathVariable String query) { return personDao.findPersons(query); } } ## Instruction: Correct controller for person look up (for type ahead). It returns the full list of objects matching the person ## Code After: /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package uk.org.rbc1b.roms.controller.person; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import uk.org.rbc1b.roms.db.Person; /** * Control access to the underlying person data. * * @author oliver */ @Controller @RequestMapping("/persons") public class PersonsController { @Autowired private PersonDao personDao; /** * Person search. Pass in a candidate, match this against the user * first/last name and return the person object in JSON format * * @param query person match lookup * @return model containing the list of people */ @RequestMapping(value = "search", method = RequestMethod.GET, headers = "Accept=application/json") //@PreAuthorize - not clear who will not be allowed to access @Transactional(readOnly = true) @ResponseBody public List<Person> handleList(@RequestParam String query) { return personDao.findPersons(query); } }
// ... existing code ... import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import uk.org.rbc1b.roms.db.Person; /** // ... modified code ... * first/last name and return the person object in JSON format * * @param query person match lookup * @return model containing the list of people */ @RequestMapping(value = "search", method = RequestMethod.GET, headers = "Accept=application/json") //@PreAuthorize - not clear who will not be allowed to access @Transactional(readOnly = true) @ResponseBody public List<Person> handleList(@RequestParam String query) { return personDao.findPersons(query); } } // ... rest of the code ...
1e23074630de96515cb2888b4c7bba34dd0a141b
52n-sir/src/test/java/org/n52/sir/harvest/JSHarvestTest.java
52n-sir/src/test/java/org/n52/sir/harvest/JSHarvestTest.java
package org.n52.sir.harvest; import java.io.File; import org.junit.Test; import org.n52.sir.datastructure.SirSensor; public class JSHarvestTest { @Test public void harvestJSFile() { File harvestScript = new File(ClassLoader.getSystemResource( "Requests/harvestScript.js").getFile()); } }
package org.n52.sir.harvest; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.util.Collection; import org.junit.Test; import org.n52.sir.datastructure.SirSearchResultElement; import org.n52.sir.datastructure.SirSensor; import org.n52.sir.datastructure.detailed.SirDetailedSensorDescription; import org.n52.sir.ds.solr.SOLRSearchSensorDAO; import org.n52.sir.harvest.exec.IJSExecute; import org.n52.sir.harvest.exec.impl.RhinoJSExecute; import org.n52.sir.ows.OwsExceptionReport; public class JSHarvestTest { @Test public void harvestJSFile() throws OwsExceptionReport { File harvestScript = new File(ClassLoader.getSystemResource( "Requests/harvestScript.js").getFile()); IJSExecute execEngine = new RhinoJSExecute(); String id = execEngine.execute(harvestScript); assertNotNull(id); SOLRSearchSensorDAO searchDAO = new SOLRSearchSensorDAO(); Collection<SirSearchResultElement> elements = searchDAO.searchByID(id); assertNotEquals(elements.size(),0); SirSearchResultElement element = elements.iterator().next(); SirDetailedSensorDescription description = (SirDetailedSensorDescription)element.getSensorDescription(); Collection<Object> keywords = description.getKeywords(); assertTrue(keywords.contains("javascript")); assertTrue(keywords.contains("harvest")); assertTrue(description.getLocation().equals("3,1.5")); Collection<String> contacts = description.getContacts(); assertTrue(contacts.contains("52north")); assertTrue(contacts.contains("rhino")); } }
Test sensor JS harvest unit test
Test sensor JS harvest unit test
Java
apache-2.0
52North/OpenSensorSearch,nuest/OpenSensorSearch,52North/OpenSensorSearch,nuest/OpenSensorSearch,52North/OpenSensorSearch,nuest/OpenSensorSearch
java
## Code Before: package org.n52.sir.harvest; import java.io.File; import org.junit.Test; import org.n52.sir.datastructure.SirSensor; public class JSHarvestTest { @Test public void harvestJSFile() { File harvestScript = new File(ClassLoader.getSystemResource( "Requests/harvestScript.js").getFile()); } } ## Instruction: Test sensor JS harvest unit test ## Code After: package org.n52.sir.harvest; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.util.Collection; import org.junit.Test; import org.n52.sir.datastructure.SirSearchResultElement; import org.n52.sir.datastructure.SirSensor; import org.n52.sir.datastructure.detailed.SirDetailedSensorDescription; import org.n52.sir.ds.solr.SOLRSearchSensorDAO; import org.n52.sir.harvest.exec.IJSExecute; import org.n52.sir.harvest.exec.impl.RhinoJSExecute; import org.n52.sir.ows.OwsExceptionReport; public class JSHarvestTest { @Test public void harvestJSFile() throws OwsExceptionReport { File harvestScript = new File(ClassLoader.getSystemResource( "Requests/harvestScript.js").getFile()); IJSExecute execEngine = new RhinoJSExecute(); String id = execEngine.execute(harvestScript); assertNotNull(id); SOLRSearchSensorDAO searchDAO = new SOLRSearchSensorDAO(); Collection<SirSearchResultElement> elements = searchDAO.searchByID(id); assertNotEquals(elements.size(),0); SirSearchResultElement element = elements.iterator().next(); SirDetailedSensorDescription description = (SirDetailedSensorDescription)element.getSensorDescription(); Collection<Object> keywords = description.getKeywords(); assertTrue(keywords.contains("javascript")); assertTrue(keywords.contains("harvest")); assertTrue(description.getLocation().equals("3,1.5")); Collection<String> contacts = description.getContacts(); assertTrue(contacts.contains("52north")); assertTrue(contacts.contains("rhino")); } }
... package org.n52.sir.harvest; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.util.Collection; import org.junit.Test; import org.n52.sir.datastructure.SirSearchResultElement; import org.n52.sir.datastructure.SirSensor; import org.n52.sir.datastructure.detailed.SirDetailedSensorDescription; import org.n52.sir.ds.solr.SOLRSearchSensorDAO; import org.n52.sir.harvest.exec.IJSExecute; import org.n52.sir.harvest.exec.impl.RhinoJSExecute; import org.n52.sir.ows.OwsExceptionReport; public class JSHarvestTest { @Test public void harvestJSFile() throws OwsExceptionReport { File harvestScript = new File(ClassLoader.getSystemResource( "Requests/harvestScript.js").getFile()); IJSExecute execEngine = new RhinoJSExecute(); String id = execEngine.execute(harvestScript); assertNotNull(id); SOLRSearchSensorDAO searchDAO = new SOLRSearchSensorDAO(); Collection<SirSearchResultElement> elements = searchDAO.searchByID(id); assertNotEquals(elements.size(),0); SirSearchResultElement element = elements.iterator().next(); SirDetailedSensorDescription description = (SirDetailedSensorDescription)element.getSensorDescription(); Collection<Object> keywords = description.getKeywords(); assertTrue(keywords.contains("javascript")); assertTrue(keywords.contains("harvest")); assertTrue(description.getLocation().equals("3,1.5")); Collection<String> contacts = description.getContacts(); assertTrue(contacts.contains("52north")); assertTrue(contacts.contains("rhino")); } } ...
a162a996d9eafa168f0bf321df9bc2b6996fb9d3
src/test/java/com/gmail/cs475x/anybar4j/AnyBar4jTest.java
src/test/java/com/gmail/cs475x/anybar4j/AnyBar4jTest.java
package com.gmail.cs475x.anybar4j; import static org.junit.Assert.assertEquals; import org.junit.Test; import com.gmail.cs475x.anybar4j.AnyBar4j.AnyBarImage; public class AnyBar4jTest { @Test public void shouldWorkWithDefaultHostAndPort() { Exception exception = null; try { AnyBar4j anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, AnyBar4j.DEFAULT_PORT); anybar.setImage(AnyBarImage.GREEN); anybar.close(); } catch (Exception e) { exception = e; } assertEquals(null, exception); } @Test public void shouldUseDefaultPortIfSuppliedPortIsLessThanOrEqualToZero() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, -1); anybar.close(); } catch (Exception e) { exception = e; } assertEquals(null, exception); assertEquals(AnyBar4j.DEFAULT_PORT, anybar.port); } }
package com.gmail.cs475x.anybar4j; import static org.junit.Assert.assertEquals; import org.junit.Test; import com.gmail.cs475x.anybar4j.AnyBar4j.AnyBarImage; public class AnyBar4jTest { @Test public void shouldWorkWithDefaultHostAndPort() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, AnyBar4j.DEFAULT_PORT); anybar.setImage(AnyBarImage.GREEN); } catch (Exception e) { exception = e; } finally { if (anybar != null) { anybar.close(); } } assertEquals(null, exception); } @Test public void shouldUseDefaultPortIfSuppliedPortIsLessThanOrEqualToZero() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, -1); } catch (Exception e) { exception = e; } finally { if (anybar != null) { anybar.close(); } } assertEquals(null, exception); assertEquals(AnyBar4j.DEFAULT_PORT, anybar.port); } }
Move `close()` calls to finally block
Move `close()` calls to finally block
Java
mit
cs475x/AnyBar4j
java
## Code Before: package com.gmail.cs475x.anybar4j; import static org.junit.Assert.assertEquals; import org.junit.Test; import com.gmail.cs475x.anybar4j.AnyBar4j.AnyBarImage; public class AnyBar4jTest { @Test public void shouldWorkWithDefaultHostAndPort() { Exception exception = null; try { AnyBar4j anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, AnyBar4j.DEFAULT_PORT); anybar.setImage(AnyBarImage.GREEN); anybar.close(); } catch (Exception e) { exception = e; } assertEquals(null, exception); } @Test public void shouldUseDefaultPortIfSuppliedPortIsLessThanOrEqualToZero() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, -1); anybar.close(); } catch (Exception e) { exception = e; } assertEquals(null, exception); assertEquals(AnyBar4j.DEFAULT_PORT, anybar.port); } } ## Instruction: Move `close()` calls to finally block ## Code After: package com.gmail.cs475x.anybar4j; import static org.junit.Assert.assertEquals; import org.junit.Test; import com.gmail.cs475x.anybar4j.AnyBar4j.AnyBarImage; public class AnyBar4jTest { @Test public void shouldWorkWithDefaultHostAndPort() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, AnyBar4j.DEFAULT_PORT); anybar.setImage(AnyBarImage.GREEN); } catch (Exception e) { exception = e; } finally { if (anybar != null) { anybar.close(); } } assertEquals(null, exception); } @Test public void shouldUseDefaultPortIfSuppliedPortIsLessThanOrEqualToZero() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, -1); } catch (Exception e) { exception = e; } finally { if (anybar != null) { anybar.close(); } } assertEquals(null, exception); assertEquals(AnyBar4j.DEFAULT_PORT, anybar.port); } }
# ... existing code ... @Test public void shouldWorkWithDefaultHostAndPort() { AnyBar4j anybar = null; Exception exception = null; try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, AnyBar4j.DEFAULT_PORT); anybar.setImage(AnyBarImage.GREEN); } catch (Exception e) { exception = e; } finally { if (anybar != null) { anybar.close(); } } assertEquals(null, exception); # ... modified code ... try { anybar = new AnyBar4j(AnyBar4j.DEFAULT_HOST, -1); } catch (Exception e) { exception = e; } finally { if (anybar != null) { anybar.close(); } } assertEquals(null, exception); # ... rest of the code ...
b80e1facf3c47364384fa04f764838ba1b8cb55c
form_designer/apps.py
form_designer/apps.py
from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class FormDesignerConfig(AppConfig): name = "form_designer" verbose_name = _("Form Designer")
from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class FormDesignerConfig(AppConfig): default_auto_field = "django.db.models.AutoField" name = "form_designer" verbose_name = _("Form Designer")
Set the default auto field to be AutoField
Set the default auto field to be AutoField On django 3.2 it creates a migration to be BigAutoField. This fixes it.
Python
bsd-3-clause
feincms/form_designer,feincms/form_designer
python
## Code Before: from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class FormDesignerConfig(AppConfig): name = "form_designer" verbose_name = _("Form Designer") ## Instruction: Set the default auto field to be AutoField On django 3.2 it creates a migration to be BigAutoField. This fixes it. ## Code After: from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ class FormDesignerConfig(AppConfig): default_auto_field = "django.db.models.AutoField" name = "form_designer" verbose_name = _("Form Designer")
# ... existing code ... class FormDesignerConfig(AppConfig): default_auto_field = "django.db.models.AutoField" name = "form_designer" verbose_name = _("Form Designer") # ... rest of the code ...
253e0aa2d99f06a4fff6f01f224353e2d036cc0c
app/src/main/java/org/stepic/droid/configuration/RemoteConfig.kt
app/src/main/java/org/stepic/droid/configuration/RemoteConfig.kt
package org.stepic.droid.configuration object RemoteConfig { const val PREFIX = "remote_config_" const val MIN_DELAY_RATE_DIALOG_SEC = "min_delay_rate_dialog_sec" const val SHOW_STREAK_DIALOG_AFTER_LOGIN = "show_streak_dialog_after_login" const val SHOW_NOTIFICATIONS_BADGES = "show_notifications_badges" const val ADAPTIVE_COURSES = "adaptive_courses_android" const val ADAPTIVE_BACKEND_URL = "adaptive_backend_url" const val IS_LOCAL_SUBMISSIONS_ENABLED = "is_local_submissions_enabled" const val IS_PEER_REVIEW_ENABLED = "is_peer_review_enabled" const val IS_DISABLED_STEPS_SUPPORTED = "is_disabled_steps_supported" const val SEARCH_QUERY_PARAMS_ANDROID = "search_query_params_android" }
package org.stepic.droid.configuration object RemoteConfig { const val PREFIX = "remote_config_" const val MIN_DELAY_RATE_DIALOG_SEC = "min_delay_rate_dialog_sec" const val SHOW_STREAK_DIALOG_AFTER_LOGIN = "show_streak_dialog_after_login" const val SHOW_NOTIFICATIONS_BADGES = "show_notifications_badges" const val ADAPTIVE_COURSES = "adaptive_courses_android" const val ADAPTIVE_BACKEND_URL = "adaptive_backend_url" const val IS_LOCAL_SUBMISSIONS_ENABLED = "is_local_submissions_enabled" const val IS_PEER_REVIEW_ENABLED = "is_peer_review_enabled" const val IS_DISABLED_STEPS_SUPPORTED = "is_disabled_steps_supported" const val SEARCH_QUERY_PARAMS_ANDROID = "search_query_params_android" const val IS_NEW_HOME_SCREEN_ENABLED = "is_new_home_screen_enabled" }
Add feature flag to remote config
Add feature flag to remote config
Kotlin
apache-2.0
StepicOrg/stepik-android,StepicOrg/stepic-android,StepicOrg/stepik-android,StepicOrg/stepic-android,StepicOrg/stepic-android,StepicOrg/stepik-android
kotlin
## Code Before: package org.stepic.droid.configuration object RemoteConfig { const val PREFIX = "remote_config_" const val MIN_DELAY_RATE_DIALOG_SEC = "min_delay_rate_dialog_sec" const val SHOW_STREAK_DIALOG_AFTER_LOGIN = "show_streak_dialog_after_login" const val SHOW_NOTIFICATIONS_BADGES = "show_notifications_badges" const val ADAPTIVE_COURSES = "adaptive_courses_android" const val ADAPTIVE_BACKEND_URL = "adaptive_backend_url" const val IS_LOCAL_SUBMISSIONS_ENABLED = "is_local_submissions_enabled" const val IS_PEER_REVIEW_ENABLED = "is_peer_review_enabled" const val IS_DISABLED_STEPS_SUPPORTED = "is_disabled_steps_supported" const val SEARCH_QUERY_PARAMS_ANDROID = "search_query_params_android" } ## Instruction: Add feature flag to remote config ## Code After: package org.stepic.droid.configuration object RemoteConfig { const val PREFIX = "remote_config_" const val MIN_DELAY_RATE_DIALOG_SEC = "min_delay_rate_dialog_sec" const val SHOW_STREAK_DIALOG_AFTER_LOGIN = "show_streak_dialog_after_login" const val SHOW_NOTIFICATIONS_BADGES = "show_notifications_badges" const val ADAPTIVE_COURSES = "adaptive_courses_android" const val ADAPTIVE_BACKEND_URL = "adaptive_backend_url" const val IS_LOCAL_SUBMISSIONS_ENABLED = "is_local_submissions_enabled" const val IS_PEER_REVIEW_ENABLED = "is_peer_review_enabled" const val IS_DISABLED_STEPS_SUPPORTED = "is_disabled_steps_supported" const val SEARCH_QUERY_PARAMS_ANDROID = "search_query_params_android" const val IS_NEW_HOME_SCREEN_ENABLED = "is_new_home_screen_enabled" }
// ... existing code ... const val IS_PEER_REVIEW_ENABLED = "is_peer_review_enabled" const val IS_DISABLED_STEPS_SUPPORTED = "is_disabled_steps_supported" const val SEARCH_QUERY_PARAMS_ANDROID = "search_query_params_android" const val IS_NEW_HOME_SCREEN_ENABLED = "is_new_home_screen_enabled" } // ... rest of the code ...
6490273d2a5da74c6508f1ced198d05f16dcfe39
src/org/biojava/bio/symbol/DNANoAmbPack.java
src/org/biojava/bio/symbol/DNANoAmbPack.java
package org.biojava.bio.symbol; import java.util.*; import org.biojava.bio.seq.*; /** * @author Matthew Pocock */ public class DNANoAmbPack implements Packing { Symbol placeHolder; public DNANoAmbPack(Symbol placeHolder) { this.placeHolder = placeHolder; } public FiniteAlphabet getAlphabet() { return DNATools.getDNA(); } public byte pack(Symbol sym) { if(false) { } else if(sym == DNATools.a()) { return 0; } else if(sym == DNATools.g()) { return 1; } else if(sym == DNATools.c()) { return 2; } else if(sym == DNATools.t()) { return 3; } return pack(placeHolder); } public Symbol unpack(byte b) throws IllegalSymbolException { if(false) { } else if(b == 0) { return DNATools.a(); } else if(b == 1) { return DNATools.g(); } else if(b == 2) { return DNATools.c(); } else if(b == 3) { return DNATools.t(); } throw new IllegalSymbolException("Can't unpack: " + b); } public byte wordSize() { return 2; } public boolean handlesAmbiguity() { return false; } }
package org.biojava.bio.symbol; import java.util.*; import org.biojava.bio.seq.*; /** * @author Matthew Pocock * @author Thomas Down */ public class DNANoAmbPack implements Packing { final byte placeHolder; public DNANoAmbPack(byte placeHolder) { this.placeHolder = placeHolder; } public DNANoAmbPack(Symbol placeHolderSymbol) { this.placeHolder = pack(placeHolderSymbol); } public FiniteAlphabet getAlphabet() { return DNATools.getDNA(); } public byte pack(Symbol sym) { if(false) { } else if(sym == DNATools.a()) { return 0; } else if(sym == DNATools.g()) { return 1; } else if(sym == DNATools.c()) { return 2; } else if(sym == DNATools.t()) { return 3; } return placeHolder; } public Symbol unpack(byte b) throws IllegalSymbolException { if(false) { } else if(b == 0) { return DNATools.a(); } else if(b == 1) { return DNATools.g(); } else if(b == 2) { return DNATools.c(); } else if(b == 3) { return DNATools.t(); } throw new IllegalSymbolException("Can't unpack: " + b); } public byte wordSize() { return 2; } public boolean handlesAmbiguity() { return false; } }
Allow proper differentiation of unknown (e.g. ambiguity) symbols.
Allow proper differentiation of unknown (e.g. ambiguity) symbols. git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@2038 7c6358e6-4a41-0410-a743-a5b2a554c398
Java
lgpl-2.1
sbliven/biojava,sbliven/biojava,sbliven/biojava
java
## Code Before: package org.biojava.bio.symbol; import java.util.*; import org.biojava.bio.seq.*; /** * @author Matthew Pocock */ public class DNANoAmbPack implements Packing { Symbol placeHolder; public DNANoAmbPack(Symbol placeHolder) { this.placeHolder = placeHolder; } public FiniteAlphabet getAlphabet() { return DNATools.getDNA(); } public byte pack(Symbol sym) { if(false) { } else if(sym == DNATools.a()) { return 0; } else if(sym == DNATools.g()) { return 1; } else if(sym == DNATools.c()) { return 2; } else if(sym == DNATools.t()) { return 3; } return pack(placeHolder); } public Symbol unpack(byte b) throws IllegalSymbolException { if(false) { } else if(b == 0) { return DNATools.a(); } else if(b == 1) { return DNATools.g(); } else if(b == 2) { return DNATools.c(); } else if(b == 3) { return DNATools.t(); } throw new IllegalSymbolException("Can't unpack: " + b); } public byte wordSize() { return 2; } public boolean handlesAmbiguity() { return false; } } ## Instruction: Allow proper differentiation of unknown (e.g. ambiguity) symbols. git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@2038 7c6358e6-4a41-0410-a743-a5b2a554c398 ## Code After: package org.biojava.bio.symbol; import java.util.*; import org.biojava.bio.seq.*; /** * @author Matthew Pocock * @author Thomas Down */ public class DNANoAmbPack implements Packing { final byte placeHolder; public DNANoAmbPack(byte placeHolder) { this.placeHolder = placeHolder; } public DNANoAmbPack(Symbol placeHolderSymbol) { this.placeHolder = pack(placeHolderSymbol); } public FiniteAlphabet getAlphabet() { return DNATools.getDNA(); } public byte pack(Symbol sym) { if(false) { } else if(sym == DNATools.a()) { return 0; } else if(sym == DNATools.g()) { return 1; } else if(sym == DNATools.c()) { return 2; } else if(sym == DNATools.t()) { return 3; } return placeHolder; } public Symbol unpack(byte b) throws IllegalSymbolException { if(false) { } else if(b == 0) { return DNATools.a(); } else if(b == 1) { return DNATools.g(); } else if(b == 2) { return DNATools.c(); } else if(b == 3) { return DNATools.t(); } throw new IllegalSymbolException("Can't unpack: " + b); } public byte wordSize() { return 2; } public boolean handlesAmbiguity() { return false; } }
// ... existing code ... /** * @author Matthew Pocock * @author Thomas Down */ public class DNANoAmbPack implements Packing { final byte placeHolder; public DNANoAmbPack(byte placeHolder) { this.placeHolder = placeHolder; } public DNANoAmbPack(Symbol placeHolderSymbol) { this.placeHolder = pack(placeHolderSymbol); } public FiniteAlphabet getAlphabet() { return DNATools.getDNA(); } public byte pack(Symbol sym) { if(false) { // ... modified code ... return 3; } return placeHolder; } public Symbol unpack(byte b) // ... rest of the code ...
2727fccdb3672e1c7b28e4ba94ec743b53298f26
src/main.py
src/main.py
''' Created on Aug 12, 2017 @author: Aditya This is the main file and will import other modules/codes written for python tkinter demonstration ''' import program1 as p1 import program2 as p2 import program3 as p3 import program4 as p4 import program5 as p5 import program6 as p6 import program7 as p7 import program8 as p8 import program9 as p9 import program10 as p10 import program11 as p11 import program12 as p12 def main(): p1.sayhello() p2.HelloAppLaunch() p3.GreetingAppLaunch() p4.launchButtonApp() p5.launchButton2App() p6.launchEntryApp() p7.launchSimpleCalenderApp() p8.ControlledPorgressApp() p9.DisplayAppLaunch() p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() if __name__ == '__main__':main()
''' Created on Aug 12, 2017 @author: Aditya This is the main file and will import other modules/codes written for python tkinter demonstration ''' import program1 as p1 import program2 as p2 import program3 as p3 import program4 as p4 import program5 as p5 import program6 as p6 import program7 as p7 import program8 as p8 import program9 as p9 import program10 as p10 import program11 as p11 import program12 as p12 import program13 as p13 def main(): p1.sayhello() p2.HelloAppLaunch() p3.GreetingAppLaunch() p4.launchButtonApp() p5.launchButton2App() p6.launchEntryApp() p7.launchSimpleCalenderApp() p8.ControlledPorgressApp() p9.DisplayAppLaunch() p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() p13.launchApp() if __name__ == '__main__':main()
Include Text App in Main
Include Text App in Main
Python
mit
deshadi/python-gui-demos
python
## Code Before: ''' Created on Aug 12, 2017 @author: Aditya This is the main file and will import other modules/codes written for python tkinter demonstration ''' import program1 as p1 import program2 as p2 import program3 as p3 import program4 as p4 import program5 as p5 import program6 as p6 import program7 as p7 import program8 as p8 import program9 as p9 import program10 as p10 import program11 as p11 import program12 as p12 def main(): p1.sayhello() p2.HelloAppLaunch() p3.GreetingAppLaunch() p4.launchButtonApp() p5.launchButton2App() p6.launchEntryApp() p7.launchSimpleCalenderApp() p8.ControlledPorgressApp() p9.DisplayAppLaunch() p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() if __name__ == '__main__':main() ## Instruction: Include Text App in Main ## Code After: ''' Created on Aug 12, 2017 @author: Aditya This is the main file and will import other modules/codes written for python tkinter demonstration ''' import program1 as p1 import program2 as p2 import program3 as p3 import program4 as p4 import program5 as p5 import program6 as p6 import program7 as p7 import program8 as p8 import program9 as p9 import program10 as p10 import program11 as p11 import program12 as p12 import program13 as p13 def main(): p1.sayhello() p2.HelloAppLaunch() p3.GreetingAppLaunch() p4.launchButtonApp() p5.launchButton2App() p6.launchEntryApp() p7.launchSimpleCalenderApp() p8.ControlledPorgressApp() p9.DisplayAppLaunch() p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() p13.launchApp() if __name__ == '__main__':main()
... import program10 as p10 import program11 as p11 import program12 as p12 import program13 as p13 def main(): p1.sayhello() ... p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() p13.launchApp() if __name__ == '__main__':main() ...
b44b0f68a2dd00df1ec074cf39a66ce81cd0dae2
nowplaying.py
nowplaying.py
from termcolor import colored from appscript import * from track import Track def main(): print(get_song()) def get_song(): itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count()) if itunes_open: # check if application open itunes = app('iTunes') if itunes.player_state.get() == k.playing: # check if song playing track = Track(itunes.current_track.get()) return track if __name__ == '__main__': main()
from termcolor import colored from appscript import * from track import Track def main(): print(get_song()) def get_song(): itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count()) if itunes_open: # check if application open itunes = app('iTunes') if itunes.player_state.get() == k.playing: # check if song playing track = Track(itunes.current_track.get()) return track else: return colored('No song currently playing.', 'red') else: return colored('iTunes not open.', 'red') if __name__ == '__main__': main()
Update error output for app not open/song not playing
Update error output for app not open/song not playing
Python
mit
kshvmdn/nowplaying
python
## Code Before: from termcolor import colored from appscript import * from track import Track def main(): print(get_song()) def get_song(): itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count()) if itunes_open: # check if application open itunes = app('iTunes') if itunes.player_state.get() == k.playing: # check if song playing track = Track(itunes.current_track.get()) return track if __name__ == '__main__': main() ## Instruction: Update error output for app not open/song not playing ## Code After: from termcolor import colored from appscript import * from track import Track def main(): print(get_song()) def get_song(): itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count()) if itunes_open: # check if application open itunes = app('iTunes') if itunes.player_state.get() == k.playing: # check if song playing track = Track(itunes.current_track.get()) return track else: return colored('No song currently playing.', 'red') else: return colored('iTunes not open.', 'red') if __name__ == '__main__': main()
# ... existing code ... if itunes.player_state.get() == k.playing: # check if song playing track = Track(itunes.current_track.get()) return track else: return colored('No song currently playing.', 'red') else: return colored('iTunes not open.', 'red') if __name__ == '__main__': main() # ... rest of the code ...
2dec3e5810ef9ba532eaa735d0eac149c240aa2f
pyxrf/api.py
pyxrf/api.py
import logging logger = logging.getLogger() try: from .model.load_data_from_db import db, db_analysis except ImportError: db = None db_analysis = None logger.error('databroker is not available.')
from .model.fileio import (stitch_fitted_results, spec_to_hdf, create_movie, # noqa: F401 combine_data_to_recon, h5file_for_recon, export_to_view, # noqa: F401 make_hdf_stitched) # noqa: F401 from .model.load_data_from_db import make_hdf, export1d # noqa: F401 from .model.command_tools import fit_pixel_data_and_save, pyxrf_batch # noqa: F401 # Note: the statement '# noqa: F401' is telling flake8 to ignore violation F401 at the given line # Violation F401 - the package is imported but unused import logging logger = logging.getLogger() try: from .model.load_data_from_db import db, db_analysis except ImportError: db = None db_analysis = None logger.error('databroker is not available.')
Set flake8 to ignore F401 violations
Set flake8 to ignore F401 violations
Python
bsd-3-clause
NSLS-II/PyXRF,NSLS-II-HXN/PyXRF,NSLS-II-HXN/PyXRF
python
## Code Before: import logging logger = logging.getLogger() try: from .model.load_data_from_db import db, db_analysis except ImportError: db = None db_analysis = None logger.error('databroker is not available.') ## Instruction: Set flake8 to ignore F401 violations ## Code After: from .model.fileio import (stitch_fitted_results, spec_to_hdf, create_movie, # noqa: F401 combine_data_to_recon, h5file_for_recon, export_to_view, # noqa: F401 make_hdf_stitched) # noqa: F401 from .model.load_data_from_db import make_hdf, export1d # noqa: F401 from .model.command_tools import fit_pixel_data_and_save, pyxrf_batch # noqa: F401 # Note: the statement '# noqa: F401' is telling flake8 to ignore violation F401 at the given line # Violation F401 - the package is imported but unused import logging logger = logging.getLogger() try: from .model.load_data_from_db import db, db_analysis except ImportError: db = None db_analysis = None logger.error('databroker is not available.')
# ... existing code ... from .model.fileio import (stitch_fitted_results, spec_to_hdf, create_movie, # noqa: F401 combine_data_to_recon, h5file_for_recon, export_to_view, # noqa: F401 make_hdf_stitched) # noqa: F401 from .model.load_data_from_db import make_hdf, export1d # noqa: F401 from .model.command_tools import fit_pixel_data_and_save, pyxrf_batch # noqa: F401 # Note: the statement '# noqa: F401' is telling flake8 to ignore violation F401 at the given line # Violation F401 - the package is imported but unused import logging logger = logging.getLogger() # ... rest of the code ...
5a09c6e9545373cece95f87ed28579f05959fced
tests/skip_check.py
tests/skip_check.py
from __future__ import absolute_import, division, print_function import pytest def skip_check(name, iface, item): if name in item.keywords and item.funcargs.get('backend') is not None: if not isinstance(item.funcargs['backend'], iface): pytest.skip("Backend does not support {0}".format(name))
from __future__ import absolute_import, division, print_function import pytest def skip_check(name, iface, item): if name in item.keywords and "backend" in item.funcargs: if not isinstance(item.funcargs["backend"], iface): pytest.skip("{0} backend does not support {1}".format( item.funcargs["backend"], name ))
Include teh name of the backend in the error message
Include teh name of the backend in the error message
Python
bsd-3-clause
Hasimir/cryptography,skeuomorf/cryptography,skeuomorf/cryptography,dstufft/cryptography,bwhmather/cryptography,skeuomorf/cryptography,Lukasa/cryptography,bwhmather/cryptography,Hasimir/cryptography,kimvais/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,Lukasa/cryptography,sholsapp/cryptography,Ayrx/cryptography,dstufft/cryptography,dstufft/cryptography,bwhmather/cryptography,glyph/cryptography,bwhmather/cryptography,kimvais/cryptography,kimvais/cryptography,Ayrx/cryptography,dstufft/cryptography,glyph/cryptography,skeuomorf/cryptography,Lukasa/cryptography,kimvais/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography
python
## Code Before: from __future__ import absolute_import, division, print_function import pytest def skip_check(name, iface, item): if name in item.keywords and item.funcargs.get('backend') is not None: if not isinstance(item.funcargs['backend'], iface): pytest.skip("Backend does not support {0}".format(name)) ## Instruction: Include teh name of the backend in the error message ## Code After: from __future__ import absolute_import, division, print_function import pytest def skip_check(name, iface, item): if name in item.keywords and "backend" in item.funcargs: if not isinstance(item.funcargs["backend"], iface): pytest.skip("{0} backend does not support {1}".format( item.funcargs["backend"], name ))
... def skip_check(name, iface, item): if name in item.keywords and "backend" in item.funcargs: if not isinstance(item.funcargs["backend"], iface): pytest.skip("{0} backend does not support {1}".format( item.funcargs["backend"], name )) ...
167d93eef63aa1e633fec6439dfaa59e0e62b433
app/src/main/java/net/squanchy/support/font/TypefaceDelegate.kt
app/src/main/java/net/squanchy/support/font/TypefaceDelegate.kt
package net.squanchy.support.font import android.graphics.Paint import android.graphics.Typeface class TypefaceDelegate(private val newTypeface: Typeface) { private companion object { @SuppressWarnings("MagicNumber") const val TEXT_SKEW_X = -.25f const val FALSE_FLAG = 0 } fun applyTypefaceTo(paint: Paint) { val fakeStyle = fakeStyle(paint) if (bold(fakeStyle)) { paint.isFakeBoldText = true } if (italic(fakeStyle)) { paint.textSkewX = TEXT_SKEW_X } paint.typeface = newTypeface } private fun bold(fakeStyle: Int) = fakeStyle.and(Typeface.BOLD) != FALSE_FLAG private fun italic(fakeStyle: Int) = fakeStyle.and(Typeface.ITALIC) != FALSE_FLAG private fun fakeStyle(paint: Paint): Int { val oldStyle = paint.typeface?.style ?: 0 return oldStyle.and(newTypeface.style.inv()) } }
package net.squanchy.support.font import android.graphics.Paint import android.graphics.Typeface class TypefaceDelegate(private val newTypeface: Typeface) { private companion object { @SuppressWarnings("MagicNumber") const val TEXT_SKEW_X = -.25f const val FALSE_FLAG = 0 } fun applyTypefaceTo(paint: Paint) { val previousStyle = computePreviousStyle(paint) if (isBold(previousStyle)) { paint.isFakeBoldText = true } if (isItalic(previousStyle)) { paint.textSkewX = TEXT_SKEW_X } paint.typeface = newTypeface } private fun isBold(fakeStyle: Int) = fakeStyle.and(Typeface.BOLD) != FALSE_FLAG private fun isItalic(fakeStyle: Int) = fakeStyle.and(Typeface.ITALIC) != FALSE_FLAG private fun computePreviousStyle(paint: Paint): Int { val oldStyle = paint.typeface?.style ?: 0 return oldStyle.and(newTypeface.style.inv()) } }
Change naming to make the code easier to understand
Change naming to make the code easier to understand
Kotlin
apache-2.0
squanchy-dev/squanchy-android,squanchy-dev/squanchy-android,squanchy-dev/squanchy-android
kotlin
## Code Before: package net.squanchy.support.font import android.graphics.Paint import android.graphics.Typeface class TypefaceDelegate(private val newTypeface: Typeface) { private companion object { @SuppressWarnings("MagicNumber") const val TEXT_SKEW_X = -.25f const val FALSE_FLAG = 0 } fun applyTypefaceTo(paint: Paint) { val fakeStyle = fakeStyle(paint) if (bold(fakeStyle)) { paint.isFakeBoldText = true } if (italic(fakeStyle)) { paint.textSkewX = TEXT_SKEW_X } paint.typeface = newTypeface } private fun bold(fakeStyle: Int) = fakeStyle.and(Typeface.BOLD) != FALSE_FLAG private fun italic(fakeStyle: Int) = fakeStyle.and(Typeface.ITALIC) != FALSE_FLAG private fun fakeStyle(paint: Paint): Int { val oldStyle = paint.typeface?.style ?: 0 return oldStyle.and(newTypeface.style.inv()) } } ## Instruction: Change naming to make the code easier to understand ## Code After: package net.squanchy.support.font import android.graphics.Paint import android.graphics.Typeface class TypefaceDelegate(private val newTypeface: Typeface) { private companion object { @SuppressWarnings("MagicNumber") const val TEXT_SKEW_X = -.25f const val FALSE_FLAG = 0 } fun applyTypefaceTo(paint: Paint) { val previousStyle = computePreviousStyle(paint) if (isBold(previousStyle)) { paint.isFakeBoldText = true } if (isItalic(previousStyle)) { paint.textSkewX = TEXT_SKEW_X } paint.typeface = newTypeface } private fun isBold(fakeStyle: Int) = fakeStyle.and(Typeface.BOLD) != FALSE_FLAG private fun isItalic(fakeStyle: Int) = fakeStyle.and(Typeface.ITALIC) != FALSE_FLAG private fun computePreviousStyle(paint: Paint): Int { val oldStyle = paint.typeface?.style ?: 0 return oldStyle.and(newTypeface.style.inv()) } }
# ... existing code ... } fun applyTypefaceTo(paint: Paint) { val previousStyle = computePreviousStyle(paint) if (isBold(previousStyle)) { paint.isFakeBoldText = true } if (isItalic(previousStyle)) { paint.textSkewX = TEXT_SKEW_X } # ... modified code ... paint.typeface = newTypeface } private fun isBold(fakeStyle: Int) = fakeStyle.and(Typeface.BOLD) != FALSE_FLAG private fun isItalic(fakeStyle: Int) = fakeStyle.and(Typeface.ITALIC) != FALSE_FLAG private fun computePreviousStyle(paint: Paint): Int { val oldStyle = paint.typeface?.style ?: 0 return oldStyle.and(newTypeface.style.inv()) } # ... rest of the code ...
1e6a424e2669441e6910d3a2803bc139df16dd51
new_validity.py
new_validity.py
import pandas as pd import numpy as np import operator from sys import argv import os def extract( file_name ): with open(file_name) as f: for i,line in enumerate(f,1): if "SCN" in line: return i def main(lta_name): os.system('ltahdr -i'+ lta_name + '> lta_file.txt') dictionary = {} #lta_file = str(argv[1]) skipped_rows = extract('lta_file.txt')-1 header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+") flux = list(set(header["OBJECT"])) #print flux header['Nrecs'] = header['Nrecs'].astype(float) for i in flux : temp = header.loc[header.OBJECT==i,'Nrecs'].values temp = np.mean(temp) dictionary[i]=temp #print dictionary source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0] return source
import pandas as pd import numpy as np import operator from sys import argv import os def extract( file_name ): with open(file_name) as f: for i,line in enumerate(f,1): if "SCN" in line: return i def main(): lta_file = str(argv[1]) calibrator_list = ['3C48', '3C147', '3C286'] os.system('ltahdr -i'+ lta_file + '> lta_file.txt') dictionary = {} try: skipped_rows = extract('lta_file.txt')-1 header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+") flux = list(set(header["OBJECT"])) #print flux header['Nrecs'] = header['Nrecs'].astype(float) for i in flux : temp = header.loc[header.OBJECT==i,'Nrecs'].values temp = np.mean(temp) dictionary[i]=temp print dictionary #Sort the list of targets according to the number of recordings list_of_targets = [ i for i,j in sorted(dictionary.iteritems(),key=operator.itemgetter(1), reverse=True)] source = max(list_of_targets) for i in len(flux): if source in calibrator_list: continue else: return source except: pass print main()
Add scratch file for testing new validity
Add scratch file for testing new validity
Python
mit
NCRA-TIFR/gadpu,NCRA-TIFR/gadpu
python
## Code Before: import pandas as pd import numpy as np import operator from sys import argv import os def extract( file_name ): with open(file_name) as f: for i,line in enumerate(f,1): if "SCN" in line: return i def main(lta_name): os.system('ltahdr -i'+ lta_name + '> lta_file.txt') dictionary = {} #lta_file = str(argv[1]) skipped_rows = extract('lta_file.txt')-1 header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+") flux = list(set(header["OBJECT"])) #print flux header['Nrecs'] = header['Nrecs'].astype(float) for i in flux : temp = header.loc[header.OBJECT==i,'Nrecs'].values temp = np.mean(temp) dictionary[i]=temp #print dictionary source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0] return source ## Instruction: Add scratch file for testing new validity ## Code After: import pandas as pd import numpy as np import operator from sys import argv import os def extract( file_name ): with open(file_name) as f: for i,line in enumerate(f,1): if "SCN" in line: return i def main(): lta_file = str(argv[1]) calibrator_list = ['3C48', '3C147', '3C286'] os.system('ltahdr -i'+ lta_file + '> lta_file.txt') dictionary = {} try: skipped_rows = extract('lta_file.txt')-1 header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+") flux = list(set(header["OBJECT"])) #print flux header['Nrecs'] = header['Nrecs'].astype(float) for i in flux : temp = header.loc[header.OBJECT==i,'Nrecs'].values temp = np.mean(temp) dictionary[i]=temp print dictionary #Sort the list of targets according to the number of recordings list_of_targets = [ i for i,j in sorted(dictionary.iteritems(),key=operator.itemgetter(1), reverse=True)] source = max(list_of_targets) for i in len(flux): if source in calibrator_list: continue else: return source except: pass print main()
... for i,line in enumerate(f,1): if "SCN" in line: return i def main(): lta_file = str(argv[1]) calibrator_list = ['3C48', '3C147', '3C286'] os.system('ltahdr -i'+ lta_file + '> lta_file.txt') dictionary = {} try: skipped_rows = extract('lta_file.txt')-1 header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+") flux = list(set(header["OBJECT"])) #print flux header['Nrecs'] = header['Nrecs'].astype(float) for i in flux : temp = header.loc[header.OBJECT==i,'Nrecs'].values temp = np.mean(temp) dictionary[i]=temp print dictionary #Sort the list of targets according to the number of recordings list_of_targets = [ i for i,j in sorted(dictionary.iteritems(),key=operator.itemgetter(1), reverse=True)] source = max(list_of_targets) for i in len(flux): if source in calibrator_list: continue else: return source except: pass print main() ...
f8cbdf038a3d5cd8ba229cb627ecd1831265ca02
context.py
context.py
from llvm.core import Module from llvm.ee import ExecutionEngine from llvm.passes import (FunctionPassManager, PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) class Context(object): optimizations = (PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) def __init__(self, name): self.name = name self.module = Module.new(name) self.builder = None self.scope = {} self.executor = ExecutionEngine.new(self.module) self.fpm = self.setup_fpm() def setup_fpm(self): fpm = FunctionPassManager.new(self.module) for optimization in self.optimizations: fpm.add(optimization) fpm.initialize() return fpm
from llvm.core import Module from llvm.ee import ExecutionEngine from llvm.passes import (FunctionPassManager, PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) class Context(object): optimizations = (PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) def __init__(self, name): self.name = name self.module = Module.new(name) self.builder = None self.scope = {} self.executor = ExecutionEngine.new(self.module) self.fpm = self.setup_fpm() def setup_fpm(self): fpm = FunctionPassManager.new(self.module) # github.com/llvmpy/llvmpy/issues/44 fpm.add(self.executor.target_data.clone()) for optimization in self.optimizations: fpm.add(optimization) fpm.initialize() return fpm
Add target_data from ExecutionEngine to FunctionPassManager
Add target_data from ExecutionEngine to FunctionPassManager
Python
mit
guilload/kaleidoscope
python
## Code Before: from llvm.core import Module from llvm.ee import ExecutionEngine from llvm.passes import (FunctionPassManager, PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) class Context(object): optimizations = (PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) def __init__(self, name): self.name = name self.module = Module.new(name) self.builder = None self.scope = {} self.executor = ExecutionEngine.new(self.module) self.fpm = self.setup_fpm() def setup_fpm(self): fpm = FunctionPassManager.new(self.module) for optimization in self.optimizations: fpm.add(optimization) fpm.initialize() return fpm ## Instruction: Add target_data from ExecutionEngine to FunctionPassManager ## Code After: from llvm.core import Module from llvm.ee import ExecutionEngine from llvm.passes import (FunctionPassManager, PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) class Context(object): optimizations = (PASS_GVN, PASS_INSTCOMBINE, PASS_REASSOCIATE, PASS_SIMPLIFYCFG) def __init__(self, name): self.name = name self.module = Module.new(name) self.builder = None self.scope = {} self.executor = ExecutionEngine.new(self.module) self.fpm = self.setup_fpm() def setup_fpm(self): fpm = FunctionPassManager.new(self.module) # github.com/llvmpy/llvmpy/issues/44 fpm.add(self.executor.target_data.clone()) for optimization in self.optimizations: fpm.add(optimization) fpm.initialize() return fpm
# ... existing code ... def setup_fpm(self): fpm = FunctionPassManager.new(self.module) # github.com/llvmpy/llvmpy/issues/44 fpm.add(self.executor.target_data.clone()) for optimization in self.optimizations: fpm.add(optimization) # ... rest of the code ...
d3fc9414effb4c49104cc4a0888872d9eb4c20a9
py/garage/garage/sql/utils.py
py/garage/garage/sql/utils.py
__all__ = [ 'ensure_only_one_row', 'insert_or_ignore', ] def ensure_only_one_row(rows): row = rows.fetchone() if row is None or rows.fetchone() is not None: raise KeyError return row def insert_or_ignore(conn, table, values): conn.execute(table.insert().prefix_with('OR IGNORE'), values)
__all__ = [ 'add_if_not_exists_clause', 'ensure_only_one_row', 'insert_or_ignore', ] from garage import asserts from sqlalchemy.schema import CreateIndex def add_if_not_exists_clause(index, engine): # `sqlalchemy.Index.create()` does not take `checkfirst` for reasons # that I am unaware of, and here is a hack for sidestep that. stmt = str(CreateIndex(index).compile(engine)) stmt = stmt.replace('CREATE INDEX', 'CREATE INDEX IF NOT EXISTS', 1) asserts.postcond('IF NOT EXISTS' in stmt, stmt) return stmt def ensure_only_one_row(rows): row = rows.fetchone() if row is None or rows.fetchone() is not None: raise KeyError return row def insert_or_ignore(conn, table, values): conn.execute(table.insert().prefix_with('OR IGNORE'), values)
Add a hack for appending "IF NOT EXISTS" clause to "CREATE INDEX"
Add a hack for appending "IF NOT EXISTS" clause to "CREATE INDEX"
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
python
## Code Before: __all__ = [ 'ensure_only_one_row', 'insert_or_ignore', ] def ensure_only_one_row(rows): row = rows.fetchone() if row is None or rows.fetchone() is not None: raise KeyError return row def insert_or_ignore(conn, table, values): conn.execute(table.insert().prefix_with('OR IGNORE'), values) ## Instruction: Add a hack for appending "IF NOT EXISTS" clause to "CREATE INDEX" ## Code After: __all__ = [ 'add_if_not_exists_clause', 'ensure_only_one_row', 'insert_or_ignore', ] from garage import asserts from sqlalchemy.schema import CreateIndex def add_if_not_exists_clause(index, engine): # `sqlalchemy.Index.create()` does not take `checkfirst` for reasons # that I am unaware of, and here is a hack for sidestep that. stmt = str(CreateIndex(index).compile(engine)) stmt = stmt.replace('CREATE INDEX', 'CREATE INDEX IF NOT EXISTS', 1) asserts.postcond('IF NOT EXISTS' in stmt, stmt) return stmt def ensure_only_one_row(rows): row = rows.fetchone() if row is None or rows.fetchone() is not None: raise KeyError return row def insert_or_ignore(conn, table, values): conn.execute(table.insert().prefix_with('OR IGNORE'), values)
... __all__ = [ 'add_if_not_exists_clause', 'ensure_only_one_row', 'insert_or_ignore', ] from garage import asserts from sqlalchemy.schema import CreateIndex def add_if_not_exists_clause(index, engine): # `sqlalchemy.Index.create()` does not take `checkfirst` for reasons # that I am unaware of, and here is a hack for sidestep that. stmt = str(CreateIndex(index).compile(engine)) stmt = stmt.replace('CREATE INDEX', 'CREATE INDEX IF NOT EXISTS', 1) asserts.postcond('IF NOT EXISTS' in stmt, stmt) return stmt def ensure_only_one_row(rows): ...
47eac4ef8acca10023f2f43dd3fea0e0abbc1202
apps/organizations/admin.py
apps/organizations/admin.py
from apps.organizations.models import Organization, OrganizationAddress from django.contrib import admin class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 1 class OrganizationAdmin(admin.ModelAdmin): inlines = (OrganizationAddressAdmin,) admin.site.register(Organization, OrganizationAdmin)
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 1 class OrganizationAdmin(admin.ModelAdmin): inlines = (OrganizationAddressAdmin,) admin.site.register(Organization, OrganizationAdmin) admin.site.register(OrganizationMember)
Add Admin page for OrganizationMember.
Add Admin page for OrganizationMember.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
python
## Code Before: from apps.organizations.models import Organization, OrganizationAddress from django.contrib import admin class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 1 class OrganizationAdmin(admin.ModelAdmin): inlines = (OrganizationAddressAdmin,) admin.site.register(Organization, OrganizationAdmin) ## Instruction: Add Admin page for OrganizationMember. ## Code After: from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 1 class OrganizationAdmin(admin.ModelAdmin): inlines = (OrganizationAddressAdmin,) admin.site.register(Organization, OrganizationAdmin) admin.site.register(OrganizationMember)
# ... existing code ... from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): # ... modified code ... admin.site.register(Organization, OrganizationAdmin) admin.site.register(OrganizationMember) # ... rest of the code ...
9a474cbea3a2713a94e9e5dbc0b90762b4f354c6
automated_ebs_snapshots/connection_manager.py
automated_ebs_snapshots/connection_manager.py
""" Handles connections to AWS """ import logging import sys from boto import ec2 from boto.utils import get_instance_metadata logger = logging.getLogger(__name__) def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ logger.info('Connecting to AWS EC2 in {}'.format(region)) if access_key: # Connect using supplied credentials connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection
""" Handles connections to AWS """ import logging import sys from boto import ec2 from boto.utils import get_instance_metadata logger = logging.getLogger(__name__) def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ if access_key: # Connect using supplied credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection
Fix for logging incorrect region information when using instance role for authentication.
Fix for logging incorrect region information when using instance role for authentication.
Python
apache-2.0
bkarakashev/automated-ebs-snapshots,skymill/automated-ebs-snapshots,CBitLabs/automated-ebs-snapshots
python
## Code Before: """ Handles connections to AWS """ import logging import sys from boto import ec2 from boto.utils import get_instance_metadata logger = logging.getLogger(__name__) def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ logger.info('Connecting to AWS EC2 in {}'.format(region)) if access_key: # Connect using supplied credentials connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection ## Instruction: Fix for logging incorrect region information when using instance role for authentication. ## Code After: """ Handles connections to AWS """ import logging import sys from boto import ec2 from boto.utils import get_instance_metadata logger = logging.getLogger(__name__) def connect_to_ec2(region='us-east-1', access_key=None, secret_key=None): """ Connect to AWS ec2 :type region: str :param region: AWS region to connect to :type access_key: str :param access_key: AWS access key id :type secret_key: str :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ if access_key: # Connect using supplied credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region( region, aws_access_key_id=access_key, aws_secret_access_key=secret_key) else: # Fetch instance metadata metadata = get_instance_metadata(timeout=1, num_retries=1) if metadata: try: region = metadata['placement']['availability-zone'][:-1] except KeyError: pass # Connect using env vars or boto credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region(region) if not connection: logger.error('An error occurred when connecting to EC2') sys.exit(1) return connection
... :param secret_key: AWS secret access key :returns: boto.ec2.connection.EC2Connection -- EC2 connection """ if access_key: # Connect using supplied credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region( region, aws_access_key_id=access_key, ... pass # Connect using env vars or boto credentials logger.info('Connecting to AWS EC2 in {}'.format(region)) connection = ec2.connect_to_region(region) if not connection: ...
9ff61258ff571ce619c12db3410024d175186a61
core/src/main/java/me/prettyprint/cassandra/connection/NullOpTimer.java
core/src/main/java/me/prettyprint/cassandra/connection/NullOpTimer.java
package me.prettyprint.cassandra.connection; public class NullOpTimer implements HOpTimer { @Override public Object start() { return this; } @Override public void stop(Object token, String tagName, boolean success) { } }
package me.prettyprint.cassandra.connection; import java.io.Serializable; public class NullOpTimer implements HOpTimer, Serializable { private static final long serialVersionUID = -4762728985083933452L; @Override public Object start() { return this; } @Override public void stop(Object token, String tagName, boolean success) { } }
Make default OpTimer serializable and fix build
Make default OpTimer serializable and fix build
Java
mit
1and1/hector,apigee/hector,rantav/hector,Ursula/hector,hector-client/hector,normanmaurer/hector,koa/hector,Ursula/hector,hector-client/hector
java
## Code Before: package me.prettyprint.cassandra.connection; public class NullOpTimer implements HOpTimer { @Override public Object start() { return this; } @Override public void stop(Object token, String tagName, boolean success) { } } ## Instruction: Make default OpTimer serializable and fix build ## Code After: package me.prettyprint.cassandra.connection; import java.io.Serializable; public class NullOpTimer implements HOpTimer, Serializable { private static final long serialVersionUID = -4762728985083933452L; @Override public Object start() { return this; } @Override public void stop(Object token, String tagName, boolean success) { } }
# ... existing code ... package me.prettyprint.cassandra.connection; import java.io.Serializable; public class NullOpTimer implements HOpTimer, Serializable { private static final long serialVersionUID = -4762728985083933452L; @Override public Object start() { # ... rest of the code ...
d3f73b55dc68ec18fb5f4c43dcff59f3766d752f
mica/starcheck/__init__.py
mica/starcheck/__init__.py
from .starcheck import get_starcheck_catalog, main, get_mp_dir
from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, main, get_mp_dir
Add get_starcheck_catalog_at_date to top level items in mica.starcheck
Add get_starcheck_catalog_at_date to top level items in mica.starcheck
Python
bsd-3-clause
sot/mica,sot/mica
python
## Code Before: from .starcheck import get_starcheck_catalog, main, get_mp_dir ## Instruction: Add get_starcheck_catalog_at_date to top level items in mica.starcheck ## Code After: from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, main, get_mp_dir
// ... existing code ... from .starcheck import get_starcheck_catalog, get_starcheck_catalog_at_date, main, get_mp_dir // ... rest of the code ...
75dd98568660e589f2745c95bfcea8ddf4455fc0
OpERP/src/main/java/devopsdistilled/operp/server/data/repo/stock/StockRepository.java
OpERP/src/main/java/devopsdistilled/operp/server/data/repo/stock/StockRepository.java
package devopsdistilled.operp.server.data.repo.stock; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; import devopsdistilled.operp.server.data.entity.stock.Stock; @Repository public interface StockRepository extends JpaRepository<Stock, Long> { }
package devopsdistilled.operp.server.data.repo.stock; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; import devopsdistilled.operp.server.data.entity.items.Item; import devopsdistilled.operp.server.data.entity.stock.Stock; import devopsdistilled.operp.server.data.entity.stock.Warehouse; @Repository public interface StockRepository extends JpaRepository<Stock, Long> { Stock findByItemAndWarehouse(Item item,Warehouse warehouse); }
Create functionalities to find stock with item and warehouse
Create functionalities to find stock with item and warehouse
Java
mit
njmube/OpERP,DevOpsDistilled/OpERP
java
## Code Before: package devopsdistilled.operp.server.data.repo.stock; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; import devopsdistilled.operp.server.data.entity.stock.Stock; @Repository public interface StockRepository extends JpaRepository<Stock, Long> { } ## Instruction: Create functionalities to find stock with item and warehouse ## Code After: package devopsdistilled.operp.server.data.repo.stock; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; import devopsdistilled.operp.server.data.entity.items.Item; import devopsdistilled.operp.server.data.entity.stock.Stock; import devopsdistilled.operp.server.data.entity.stock.Warehouse; @Repository public interface StockRepository extends JpaRepository<Stock, Long> { Stock findByItemAndWarehouse(Item item,Warehouse warehouse); }
// ... existing code ... import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; import devopsdistilled.operp.server.data.entity.items.Item; import devopsdistilled.operp.server.data.entity.stock.Stock; import devopsdistilled.operp.server.data.entity.stock.Warehouse; @Repository public interface StockRepository extends JpaRepository<Stock, Long> { Stock findByItemAndWarehouse(Item item,Warehouse warehouse); } // ... rest of the code ...
2b2a1848b398e59818ea7d3aa51bf7db6669917c
pytus2000/datadicts/__init__.py
pytus2000/datadicts/__init__.py
"""This subpackage contains all data dictionaries.""" # The Python source code gets auto-generated and this package is intentially empty. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
"""This subpackage contains all data dictionaries.""" # The Python source code in this package other than this file has been auto-generated. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
Update comment for auto generated files
Update comment for auto generated files
Python
mit
timtroendle/pytus2000
python
## Code Before: """This subpackage contains all data dictionaries.""" # The Python source code gets auto-generated and this package is intentially empty. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label ## Instruction: Update comment for auto generated files ## Code After: """This subpackage contains all data dictionaries.""" # The Python source code in this package other than this file has been auto-generated. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
... """This subpackage contains all data dictionaries.""" # The Python source code in this package other than this file has been auto-generated. from enum import Enum ...
0a6072621570464522cbfa6d939dffccc0fa6503
spacy/cli/converters/iob2json.py
spacy/cli/converters/iob2json.py
from __future__ import unicode_literals import cytoolz from ...gold import iob_to_biluo def iob2json(input_data, n_sents=10, *args, **kwargs): """ Convert IOB files into JSON format for use with train cli. """ docs = [] for group in cytoolz.partition_all(n_sents, docs): group = list(group) first = group.pop(0) to_extend = first["paragraphs"][0]["sentences"] for sent in group[1:]: to_extend.extend(sent["paragraphs"][0]["sentences"]) docs.append(first) return docs def read_iob(raw_sents): sentences = [] for line in raw_sents: if not line.strip(): continue tokens = [t.split("|") for t in line.split()] if len(tokens[0]) == 3: words, pos, iob = zip(*tokens) else: words, iob = zip(*tokens) pos = ["-"] * len(words) biluo = iob_to_biluo(iob) sentences.append( [ {"orth": w, "tag": p, "ner": ent} for (w, p, ent) in zip(words, pos, biluo) ] ) sentences = [{"tokens": sent} for sent in sentences] paragraphs = [{"sentences": [sent]} for sent in sentences] docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs] return docs
from __future__ import unicode_literals from ...gold import iob_to_biluo from ...util import minibatch def iob2json(input_data, n_sents=10, *args, **kwargs): """ Convert IOB files into JSON format for use with train cli. """ docs = [] for group in minibatch(docs, n_sents): group = list(group) first = group.pop(0) to_extend = first["paragraphs"][0]["sentences"] for sent in group[1:]: to_extend.extend(sent["paragraphs"][0]["sentences"]) docs.append(first) return docs def read_iob(raw_sents): sentences = [] for line in raw_sents: if not line.strip(): continue tokens = [t.split("|") for t in line.split()] if len(tokens[0]) == 3: words, pos, iob = zip(*tokens) else: words, iob = zip(*tokens) pos = ["-"] * len(words) biluo = iob_to_biluo(iob) sentences.append( [ {"orth": w, "tag": p, "ner": ent} for (w, p, ent) in zip(words, pos, biluo) ] ) sentences = [{"tokens": sent} for sent in sentences] paragraphs = [{"sentences": [sent]} for sent in sentences] docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs] return docs
Remove cytoolz usage in CLI
Remove cytoolz usage in CLI
Python
mit
explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy
python
## Code Before: from __future__ import unicode_literals import cytoolz from ...gold import iob_to_biluo def iob2json(input_data, n_sents=10, *args, **kwargs): """ Convert IOB files into JSON format for use with train cli. """ docs = [] for group in cytoolz.partition_all(n_sents, docs): group = list(group) first = group.pop(0) to_extend = first["paragraphs"][0]["sentences"] for sent in group[1:]: to_extend.extend(sent["paragraphs"][0]["sentences"]) docs.append(first) return docs def read_iob(raw_sents): sentences = [] for line in raw_sents: if not line.strip(): continue tokens = [t.split("|") for t in line.split()] if len(tokens[0]) == 3: words, pos, iob = zip(*tokens) else: words, iob = zip(*tokens) pos = ["-"] * len(words) biluo = iob_to_biluo(iob) sentences.append( [ {"orth": w, "tag": p, "ner": ent} for (w, p, ent) in zip(words, pos, biluo) ] ) sentences = [{"tokens": sent} for sent in sentences] paragraphs = [{"sentences": [sent]} for sent in sentences] docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs] return docs ## Instruction: Remove cytoolz usage in CLI ## Code After: from __future__ import unicode_literals from ...gold import iob_to_biluo from ...util import minibatch def iob2json(input_data, n_sents=10, *args, **kwargs): """ Convert IOB files into JSON format for use with train cli. """ docs = [] for group in minibatch(docs, n_sents): group = list(group) first = group.pop(0) to_extend = first["paragraphs"][0]["sentences"] for sent in group[1:]: to_extend.extend(sent["paragraphs"][0]["sentences"]) docs.append(first) return docs def read_iob(raw_sents): sentences = [] for line in raw_sents: if not line.strip(): continue tokens = [t.split("|") for t in line.split()] if len(tokens[0]) == 3: words, pos, iob = zip(*tokens) else: words, iob = zip(*tokens) pos = ["-"] * len(words) biluo = iob_to_biluo(iob) sentences.append( [ {"orth": w, "tag": p, "ner": ent} for (w, p, ent) in zip(words, pos, biluo) ] ) sentences = [{"tokens": sent} for sent in sentences] paragraphs = [{"sentences": [sent]} for sent in sentences] docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs] return docs
... from __future__ import unicode_literals from ...gold import iob_to_biluo from ...util import minibatch def iob2json(input_data, n_sents=10, *args, **kwargs): ... Convert IOB files into JSON format for use with train cli. """ docs = [] for group in minibatch(docs, n_sents): group = list(group) first = group.pop(0) to_extend = first["paragraphs"][0]["sentences"] ...
5d0541f5b5b8cc18b2e3f86b237c01ed915d5c0a
dhcp2nest/util.py
dhcp2nest/util.py
from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) # Declare the helper routine def _follow_file_thread(fn, fq): # Use system tail with name-based following and retry p = Popen(["tail", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline() fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) # Declare the helper routine def _follow_file_thread(fn, fq): # Use system tail with name-based following and retry p = Popen(["tail", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
Make sure that follow-file decodes utf-8 from its input
Make sure that follow-file decodes utf-8 from its input Signed-off-by: Jason Bernardino Alonso <[email protected]>
Python
mit
jbalonso/dhcp2nest
python
## Code Before: from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) # Declare the helper routine def _follow_file_thread(fn, fq): # Use system tail with name-based following and retry p = Popen(["tail", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline() fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq ## Instruction: Make sure that follow-file decodes utf-8 from its input Signed-off-by: Jason Bernardino Alonso <[email protected]> ## Code After: from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) # Declare the helper routine def _follow_file_thread(fn, fq): # Use system tail with name-based following and retry p = Popen(["tail", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
// ... existing code ... # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail // ... rest of the code ...
a2713927beb4b80ba62cc0273df24d33cca4a689
namuhub/__init__.py
namuhub/__init__.py
"""namuhub --- namu.wiki contribution graph""" from flask import Flask, jsonify, render_template, request, url_for app = Flask('namuhub') @app.route('/', methods=['GET']) def index(): return render_template('index.html') @app.route('/<user>', methods=['GET']) def index_user(user=''): return render_template('index.html', **{'user': user}) @app.route('/', methods=['POST']) def namu(): user = request.POST.get('user', None) if not user: return '', 501
"""namuhub --- namu.wiki contribution graph""" import time from collections import defaultdict from datetime import timedelta from flask import Flask, jsonify, render_template, request, url_for from namuhub import namu as namuwiki app = Flask('namuhub') @app.route('/', methods=['GET']) def index(): return render_template('index.html') @app.route('/<user>', methods=['GET']) def index_user(user=''): return render_template('index.html', **{'user': user}) @app.route('/', methods=['POST']) def namu(): user = request.form.get('user', None) if not user: return jsonify({}), 501 contribs = namuwiki.contrib(user) data = defaultdict(lambda: []) # First, separate contributions into list by their activity date for contrib in contribs: date = (contrib.when - timedelta(hours=9)).date().strftime('%Y-%m-%d') data[date].append(contrib) # Convert defaultdict to dict # However, this may be inefficient but I don't care about performance at this point because it doesn't matter while it's a small project data = dict(data) # Next, we should serialize it as dict object to make sure that all the values are JSON serialiable for key, value in data.items(): value = [c.as_dict() for c in value] # Almost done, fix timezone and convert its date property to unix timestamp number that can be parsed by javascript's date object for i, c in enumerate(value): value[i]['when'] = int(time.mktime((c['when'] + timedelta(hours=9)).timetuple())) * 1000 # Overwrite existing value data[key] = value return jsonify(data)
Return namu.wiki contribution data as JSON
Return namu.wiki contribution data as JSON
Python
apache-2.0
ssut/namuhub,ssut/namuhub,ssut/namuhub
python
## Code Before: """namuhub --- namu.wiki contribution graph""" from flask import Flask, jsonify, render_template, request, url_for app = Flask('namuhub') @app.route('/', methods=['GET']) def index(): return render_template('index.html') @app.route('/<user>', methods=['GET']) def index_user(user=''): return render_template('index.html', **{'user': user}) @app.route('/', methods=['POST']) def namu(): user = request.POST.get('user', None) if not user: return '', 501 ## Instruction: Return namu.wiki contribution data as JSON ## Code After: """namuhub --- namu.wiki contribution graph""" import time from collections import defaultdict from datetime import timedelta from flask import Flask, jsonify, render_template, request, url_for from namuhub import namu as namuwiki app = Flask('namuhub') @app.route('/', methods=['GET']) def index(): return render_template('index.html') @app.route('/<user>', methods=['GET']) def index_user(user=''): return render_template('index.html', **{'user': user}) @app.route('/', methods=['POST']) def namu(): user = request.form.get('user', None) if not user: return jsonify({}), 501 contribs = namuwiki.contrib(user) data = defaultdict(lambda: []) # First, separate contributions into list by their activity date for contrib in contribs: date = (contrib.when - timedelta(hours=9)).date().strftime('%Y-%m-%d') data[date].append(contrib) # Convert defaultdict to dict # However, this may be inefficient but I don't care about performance at this point because it doesn't matter while it's a small project data = dict(data) # Next, we should serialize it as dict object to make sure that all the values are JSON serialiable for key, value in data.items(): value = [c.as_dict() for c in value] # Almost done, fix timezone and convert its date property to unix timestamp number that can be parsed by javascript's date object for i, c in enumerate(value): value[i]['when'] = int(time.mktime((c['when'] + timedelta(hours=9)).timetuple())) * 1000 # Overwrite existing value data[key] = value return jsonify(data)
# ... existing code ... """namuhub --- namu.wiki contribution graph""" import time from collections import defaultdict from datetime import timedelta from flask import Flask, jsonify, render_template, request, url_for from namuhub import namu as namuwiki app = Flask('namuhub') # ... modified code ... @app.route('/', methods=['POST']) def namu(): user = request.form.get('user', None) if not user: return jsonify({}), 501 contribs = namuwiki.contrib(user) data = defaultdict(lambda: []) # First, separate contributions into list by their activity date for contrib in contribs: date = (contrib.when - timedelta(hours=9)).date().strftime('%Y-%m-%d') data[date].append(contrib) # Convert defaultdict to dict # However, this may be inefficient but I don't care about performance at this point because it doesn't matter while it's a small project data = dict(data) # Next, we should serialize it as dict object to make sure that all the values are JSON serialiable for key, value in data.items(): value = [c.as_dict() for c in value] # Almost done, fix timezone and convert its date property to unix timestamp number that can be parsed by javascript's date object for i, c in enumerate(value): value[i]['when'] = int(time.mktime((c['when'] + timedelta(hours=9)).timetuple())) * 1000 # Overwrite existing value data[key] = value return jsonify(data) # ... rest of the code ...
dcf2dcb41e66ce01e386d526370ce23064e6e2a3
schemer/exceptions.py
schemer/exceptions.py
class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format(path) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format('\"{}\"'.format(path)) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
Improve formatting of schema format exception messages
Improve formatting of schema format exception messages
Python
mit
gamechanger/schemer
python
## Code Before: class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format(path) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors) ## Instruction: Improve formatting of schema format exception messages ## Code After: class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format('\"{}\"'.format(path)) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
... a schema.""" def __init__(self, message, path): self._message = message.format('\"{}\"'.format(path)) self._path = path @property ...
9a632c422be355c67553bbb3cfae3e6d6be6e9a2
src/main/java/com/github/arteam/jdbi3/strategies/TimedAnnotationNameStrategy.java
src/main/java/com/github/arteam/jdbi3/strategies/TimedAnnotationNameStrategy.java
package com.github.arteam.jdbi3.strategies; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.annotation.Timed; import org.jdbi.v3.core.extension.ExtensionMethod; import org.jdbi.v3.core.statement.StatementContext; import java.lang.reflect.Method; /** * Takes into account the {@link Timed} annotation on extension methods */ public class TimedAnnotationNameStrategy implements StatementNameStrategy { @Override public String getStatementName(StatementContext statementContext) { ExtensionMethod extensionMethod = statementContext.getExtensionMethod(); if (extensionMethod != null) { final Class<?> clazz = extensionMethod.getType(); final Method method = extensionMethod.getMethod(); final Timed timed = method.getAnnotation(Timed.class); if (timed != null) { return timed.absolute() ? timed.name() : MetricRegistry.name(clazz, timed.name()); } } return null; } }
package com.github.arteam.jdbi3.strategies; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.annotation.Timed; import org.jdbi.v3.core.extension.ExtensionMethod; import org.jdbi.v3.core.statement.StatementContext; import java.lang.reflect.Method; /** * Takes into account the {@link Timed} annotation on extension methods */ public class TimedAnnotationNameStrategy implements StatementNameStrategy { @Override public String getStatementName(StatementContext statementContext) { ExtensionMethod extensionMethod = statementContext.getExtensionMethod(); if (extensionMethod != null) { final Class<?> clazz = extensionMethod.getType(); final Timed classTimed = clazz.getAnnotation(Timed.class); final Method method = extensionMethod.getMethod(); final Timed methodTimed = method.getAnnotation(Timed.class); // If the method is metered, figure out the name if (methodTimed != null) { if (methodTimed.absolute()) { return methodTimed.name(); } else { // We need to check if the class has a custom timer name return classTimed == null ? MetricRegistry.name(clazz, methodTimed.name()) : MetricRegistry.name(classTimed.name(), methodTimed.name()); } } // Maybe the class is metered? if (classTimed != null) { return MetricRegistry.name(classTimed.name(), method.getName()); } } return null; } }
Add support for annotating the class as `@Timed`
Add support for annotating the class as `@Timed`
Java
apache-2.0
arteam/metrics-jdbi3
java
## Code Before: package com.github.arteam.jdbi3.strategies; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.annotation.Timed; import org.jdbi.v3.core.extension.ExtensionMethod; import org.jdbi.v3.core.statement.StatementContext; import java.lang.reflect.Method; /** * Takes into account the {@link Timed} annotation on extension methods */ public class TimedAnnotationNameStrategy implements StatementNameStrategy { @Override public String getStatementName(StatementContext statementContext) { ExtensionMethod extensionMethod = statementContext.getExtensionMethod(); if (extensionMethod != null) { final Class<?> clazz = extensionMethod.getType(); final Method method = extensionMethod.getMethod(); final Timed timed = method.getAnnotation(Timed.class); if (timed != null) { return timed.absolute() ? timed.name() : MetricRegistry.name(clazz, timed.name()); } } return null; } } ## Instruction: Add support for annotating the class as `@Timed` ## Code After: package com.github.arteam.jdbi3.strategies; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.annotation.Timed; import org.jdbi.v3.core.extension.ExtensionMethod; import org.jdbi.v3.core.statement.StatementContext; import java.lang.reflect.Method; /** * Takes into account the {@link Timed} annotation on extension methods */ public class TimedAnnotationNameStrategy implements StatementNameStrategy { @Override public String getStatementName(StatementContext statementContext) { ExtensionMethod extensionMethod = statementContext.getExtensionMethod(); if (extensionMethod != null) { final Class<?> clazz = extensionMethod.getType(); final Timed classTimed = clazz.getAnnotation(Timed.class); final Method method = extensionMethod.getMethod(); final Timed methodTimed = method.getAnnotation(Timed.class); // If the method is metered, figure out the name if (methodTimed != null) { if (methodTimed.absolute()) { return methodTimed.name(); } else { // We need to check if the class has a custom timer name return classTimed == null ? MetricRegistry.name(clazz, methodTimed.name()) : MetricRegistry.name(classTimed.name(), methodTimed.name()); } } // Maybe the class is metered? if (classTimed != null) { return MetricRegistry.name(classTimed.name(), method.getName()); } } return null; } }
... ExtensionMethod extensionMethod = statementContext.getExtensionMethod(); if (extensionMethod != null) { final Class<?> clazz = extensionMethod.getType(); final Timed classTimed = clazz.getAnnotation(Timed.class); final Method method = extensionMethod.getMethod(); final Timed methodTimed = method.getAnnotation(Timed.class); // If the method is metered, figure out the name if (methodTimed != null) { if (methodTimed.absolute()) { return methodTimed.name(); } else { // We need to check if the class has a custom timer name return classTimed == null ? MetricRegistry.name(clazz, methodTimed.name()) : MetricRegistry.name(classTimed.name(), methodTimed.name()); } } // Maybe the class is metered? if (classTimed != null) { return MetricRegistry.name(classTimed.name(), method.getName()); } } return null; ...
e7d271c41dd713750a8224f0e8f65e2d3b119623
polyaxon/auditor/service.py
polyaxon/auditor/service.py
import activitylogs import tracker from auditor.manager import default_manager from event_manager.event_service import EventService class AuditorService(EventService): """An service that just passes the event to author services.""" event_manager = default_manager def get_event(self, event_type, instance, **kwargs): return { 'event_type': event_type, 'instance': instance, 'kwargs': kwargs } def record_event(self, event): tracker.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) activitylogs.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) def setup(self): # Load default event types import auditor.events # noqa
from auditor.manager import default_manager from event_manager.event_service import EventService class AuditorService(EventService): """An service that just passes the event to author services.""" event_manager = default_manager def __init__(self): self.tracker = None self.activitylogs = None def get_event(self, event_type, instance, **kwargs): return { 'event_type': event_type, 'instance': instance, 'kwargs': kwargs } def record_event(self, event): self.tracker.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) self.activitylogs.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) def setup(self): # Load default event types import auditor.events # noqa import activitylogs import tracker self.tracker = tracker self.activitylogs = activitylogs
Move event managers imports to setup in auditor
Move event managers imports to setup in auditor
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
python
## Code Before: import activitylogs import tracker from auditor.manager import default_manager from event_manager.event_service import EventService class AuditorService(EventService): """An service that just passes the event to author services.""" event_manager = default_manager def get_event(self, event_type, instance, **kwargs): return { 'event_type': event_type, 'instance': instance, 'kwargs': kwargs } def record_event(self, event): tracker.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) activitylogs.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) def setup(self): # Load default event types import auditor.events # noqa ## Instruction: Move event managers imports to setup in auditor ## Code After: from auditor.manager import default_manager from event_manager.event_service import EventService class AuditorService(EventService): """An service that just passes the event to author services.""" event_manager = default_manager def __init__(self): self.tracker = None self.activitylogs = None def get_event(self, event_type, instance, **kwargs): return { 'event_type': event_type, 'instance': instance, 'kwargs': kwargs } def record_event(self, event): self.tracker.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) self.activitylogs.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) def setup(self): # Load default event types import auditor.events # noqa import activitylogs import tracker self.tracker = tracker self.activitylogs = activitylogs
// ... existing code ... from auditor.manager import default_manager from event_manager.event_service import EventService // ... modified code ... event_manager = default_manager def __init__(self): self.tracker = None self.activitylogs = None def get_event(self, event_type, instance, **kwargs): return { 'event_type': event_type, ... } def record_event(self, event): self.tracker.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) self.activitylogs.record(event_type=event['event_type'], instance=event['instance'], **event['kwargs']) def setup(self): # Load default event types import auditor.events # noqa import activitylogs import tracker self.tracker = tracker self.activitylogs = activitylogs // ... rest of the code ...
13664bf72b50ab96c106585839adff4402126ce4
src/main/java/sg/ncl/service/AppConfig.java
src/main/java/sg/ncl/service/AppConfig.java
package sg.ncl.service; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; /** * @author Christopher Zhong */ @Configuration("sg.ncl.service.AppConfig") @Import({sg.ncl.service.user.AppConfig.class, sg.ncl.service.team.AppConfig.class, sg.ncl.service.version.AppConfig.class}) public class AppConfig { }
package sg.ncl.service; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import sg.ncl.service.experiment.*; /** * @author Christopher Zhong */ @Configuration("sg.ncl.service.AppConfig") @Import({sg.ncl.service.authentication.AppConfig.class, sg.ncl.service.experiment.AppConfig.class, sg.ncl.service.realization.AppConfig.class, sg.ncl.service.user.AppConfig.class, sg.ncl.service.team.AppConfig.class, sg.ncl.service.version.AppConfig.class}) public class AppConfig { }
Add missing services to root project (DEV-100)
Add missing services to root project (DEV-100)
Java
apache-2.0
nus-ncl/services-in-one,nus-ncl/services-in-one
java
## Code Before: package sg.ncl.service; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; /** * @author Christopher Zhong */ @Configuration("sg.ncl.service.AppConfig") @Import({sg.ncl.service.user.AppConfig.class, sg.ncl.service.team.AppConfig.class, sg.ncl.service.version.AppConfig.class}) public class AppConfig { } ## Instruction: Add missing services to root project (DEV-100) ## Code After: package sg.ncl.service; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import sg.ncl.service.experiment.*; /** * @author Christopher Zhong */ @Configuration("sg.ncl.service.AppConfig") @Import({sg.ncl.service.authentication.AppConfig.class, sg.ncl.service.experiment.AppConfig.class, sg.ncl.service.realization.AppConfig.class, sg.ncl.service.user.AppConfig.class, sg.ncl.service.team.AppConfig.class, sg.ncl.service.version.AppConfig.class}) public class AppConfig { }
... import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import sg.ncl.service.experiment.*; /** * @author Christopher Zhong */ @Configuration("sg.ncl.service.AppConfig") @Import({sg.ncl.service.authentication.AppConfig.class, sg.ncl.service.experiment.AppConfig.class, sg.ncl.service.realization.AppConfig.class, sg.ncl.service.user.AppConfig.class, sg.ncl.service.team.AppConfig.class, sg.ncl.service.version.AppConfig.class}) public class AppConfig { } ...
1639200e5700b1170a9d2312a32c7991ed5198b4
tests/basics/boundmeth1.py
tests/basics/boundmeth1.py
print(type(repr([].append))) class A: def f(self): return 0 def g(self, a): return a def h(self, a, b, c, d, e, f): return a + b + c + d + e + f # bound method with no extra args m = A().f print(m()) # bound method with 1 extra arg m = A().g print(m(1)) # bound method with lots of extra args m = A().h print(m(1, 2, 3, 4, 5, 6))
print(type(repr([].append))) class A: def f(self): return 0 def g(self, a): return a def h(self, a, b, c, d, e, f): return a + b + c + d + e + f # bound method with no extra args m = A().f print(m()) # bound method with 1 extra arg m = A().g print(m(1)) # bound method with lots of extra args m = A().h print(m(1, 2, 3, 4, 5, 6)) # can't assign attributes to a bound method try: A().f.x = 1 except AttributeError: print('AttributeError')
Add test for assignment of attribute to bound method.
tests/basics: Add test for assignment of attribute to bound method.
Python
mit
ryannathans/micropython,bvernoux/micropython,HenrikSolver/micropython,dmazzella/micropython,lowRISC/micropython,toolmacher/micropython,ryannathans/micropython,cwyark/micropython,deshipu/micropython,mhoffma/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,MrSurly/micropython,tralamazza/micropython,alex-robbins/micropython,chrisdearman/micropython,adafruit/circuitpython,trezor/micropython,deshipu/micropython,adafruit/circuitpython,tobbad/micropython,dmazzella/micropython,PappaPeppar/micropython,puuu/micropython,MrSurly/micropython-esp32,blazewicz/micropython,MrSurly/micropython,selste/micropython,swegener/micropython,tralamazza/micropython,mhoffma/micropython,AriZuu/micropython,PappaPeppar/micropython,lowRISC/micropython,henriknelson/micropython,torwag/micropython,puuu/micropython,toolmacher/micropython,toolmacher/micropython,kerneltask/micropython,mhoffma/micropython,deshipu/micropython,HenrikSolver/micropython,Peetz0r/micropython-esp32,mhoffma/micropython,tobbad/micropython,pozetroninc/micropython,toolmacher/micropython,AriZuu/micropython,ryannathans/micropython,hiway/micropython,pozetroninc/micropython,mhoffma/micropython,swegener/micropython,swegener/micropython,blazewicz/micropython,trezor/micropython,selste/micropython,HenrikSolver/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,alex-robbins/micropython,PappaPeppar/micropython,oopy/micropython,MrSurly/micropython-esp32,adafruit/micropython,TDAbboud/micropython,adafruit/circuitpython,tobbad/micropython,infinnovation/micropython,infinnovation/micropython,alex-robbins/micropython,henriknelson/micropython,pfalcon/micropython,ryannathans/micropython,adafruit/circuitpython,adafruit/circuitpython,ryannathans/micropython,MrSurly/micropython-esp32,infinnovation/micropython,TDAbboud/micropython,pozetroninc/micropython,lowRISC/micropython,adafruit/micropython,MrSurly/micropython,hiway/micropython,Peetz0r/micropython-esp32,tobbad/micropython,MrSurly/micropython,micropython/micropython-esp32,tralamazza/micropython,pramasoul/micropython,Timmenem/micropython,pfalcon/micropython,micropython/micropython-esp32,bvernoux/micropython,henriknelson/micropython,chrisdearman/micropython,adafruit/circuitpython,Timmenem/micropython,torwag/micropython,micropython/micropython-esp32,AriZuu/micropython,dmazzella/micropython,pramasoul/micropython,selste/micropython,tralamazza/micropython,lowRISC/micropython,puuu/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,adafruit/micropython,HenrikSolver/micropython,pramasoul/micropython,kerneltask/micropython,cwyark/micropython,trezor/micropython,henriknelson/micropython,tobbad/micropython,hiway/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,AriZuu/micropython,bvernoux/micropython,TDAbboud/micropython,swegener/micropython,adafruit/micropython,selste/micropython,SHA2017-badge/micropython-esp32,Timmenem/micropython,hiway/micropython,MrSurly/micropython,infinnovation/micropython,deshipu/micropython,pozetroninc/micropython,deshipu/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,MrSurly/micropython-esp32,trezor/micropython,trezor/micropython,SHA2017-badge/micropython-esp32,torwag/micropython,pramasoul/micropython,chrisdearman/micropython,infinnovation/micropython,blazewicz/micropython,TDAbboud/micropython,pramasoul/micropython,oopy/micropython,Peetz0r/micropython-esp32,micropython/micropython-esp32,pfalcon/micropython,kerneltask/micropython,TDAbboud/micropython,chrisdearman/micropython,torwag/micropython,blazewicz/micropython,pozetroninc/micropython,cwyark/micropython,pfalcon/micropython,kerneltask/micropython,cwyark/micropython,oopy/micropython,Timmenem/micropython,toolmacher/micropython,pfalcon/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,puuu/micropython,bvernoux/micropython,blazewicz/micropython,oopy/micropython,chrisdearman/micropython,kerneltask/micropython,swegener/micropython,oopy/micropython,torwag/micropython,PappaPeppar/micropython,bvernoux/micropython,alex-robbins/micropython,hiway/micropython,cwyark/micropython,selste/micropython,lowRISC/micropython,dmazzella/micropython
python
## Code Before: print(type(repr([].append))) class A: def f(self): return 0 def g(self, a): return a def h(self, a, b, c, d, e, f): return a + b + c + d + e + f # bound method with no extra args m = A().f print(m()) # bound method with 1 extra arg m = A().g print(m(1)) # bound method with lots of extra args m = A().h print(m(1, 2, 3, 4, 5, 6)) ## Instruction: tests/basics: Add test for assignment of attribute to bound method. ## Code After: print(type(repr([].append))) class A: def f(self): return 0 def g(self, a): return a def h(self, a, b, c, d, e, f): return a + b + c + d + e + f # bound method with no extra args m = A().f print(m()) # bound method with 1 extra arg m = A().g print(m(1)) # bound method with lots of extra args m = A().h print(m(1, 2, 3, 4, 5, 6)) # can't assign attributes to a bound method try: A().f.x = 1 except AttributeError: print('AttributeError')
... # bound method with lots of extra args m = A().h print(m(1, 2, 3, 4, 5, 6)) # can't assign attributes to a bound method try: A().f.x = 1 except AttributeError: print('AttributeError') ...
98fb383fab222e8c7f13b79537bd902f5acb8145
src/java/test/org/jaxen/javabean/DocumentNavigatorTest.java
src/java/test/org/jaxen/javabean/DocumentNavigatorTest.java
package org.jaxen.javabean; import junit.framework.TestCase; public class DocumentNavigatorTest extends TestCase { public void testNothing() throws Exception { JavaBeanXPath xpath = new JavaBeanXPath( "brother[position()<4]/name" ); Person bob = new Person( "bob", 30 ); bob.addBrother( new Person( "billy", 34 ) ); bob.addBrother( new Person( "seth", 29 ) ); bob.addBrother( new Person( "dave", 32 ) ); bob.addBrother( new Person( "jim", 29 ) ); bob.addBrother( new Person( "larry", 42 ) ); bob.addBrother( new Person( "ted", 22 ) ); System.err.println( xpath.evaluate( bob ) ); } }
package org.jaxen.javabean; import junit.framework.TestCase; import org.jaxen.saxpath.helpers.XPathReaderFactory; public class DocumentNavigatorTest extends TestCase { protected void setUp() throws Exception { System.setProperty( XPathReaderFactory.DRIVER_PROPERTY, "" ); } public void testNothing() throws Exception { JavaBeanXPath xpath = new JavaBeanXPath( "brother[position()<4]/name" ); Person bob = new Person( "bob", 30 ); bob.addBrother( new Person( "billy", 34 ) ); bob.addBrother( new Person( "seth", 29 ) ); bob.addBrother( new Person( "dave", 32 ) ); bob.addBrother( new Person( "jim", 29 ) ); bob.addBrother( new Person( "larry", 42 ) ); bob.addBrother( new Person( "ted", 22 ) ); System.err.println( xpath.evaluate( bob ) ); } }
Set property in case it has been set wrong previously
Set property in case it has been set wrong previously
Java
bsd-3-clause
jenkinsci/jaxen
java
## Code Before: package org.jaxen.javabean; import junit.framework.TestCase; public class DocumentNavigatorTest extends TestCase { public void testNothing() throws Exception { JavaBeanXPath xpath = new JavaBeanXPath( "brother[position()<4]/name" ); Person bob = new Person( "bob", 30 ); bob.addBrother( new Person( "billy", 34 ) ); bob.addBrother( new Person( "seth", 29 ) ); bob.addBrother( new Person( "dave", 32 ) ); bob.addBrother( new Person( "jim", 29 ) ); bob.addBrother( new Person( "larry", 42 ) ); bob.addBrother( new Person( "ted", 22 ) ); System.err.println( xpath.evaluate( bob ) ); } } ## Instruction: Set property in case it has been set wrong previously ## Code After: package org.jaxen.javabean; import junit.framework.TestCase; import org.jaxen.saxpath.helpers.XPathReaderFactory; public class DocumentNavigatorTest extends TestCase { protected void setUp() throws Exception { System.setProperty( XPathReaderFactory.DRIVER_PROPERTY, "" ); } public void testNothing() throws Exception { JavaBeanXPath xpath = new JavaBeanXPath( "brother[position()<4]/name" ); Person bob = new Person( "bob", 30 ); bob.addBrother( new Person( "billy", 34 ) ); bob.addBrother( new Person( "seth", 29 ) ); bob.addBrother( new Person( "dave", 32 ) ); bob.addBrother( new Person( "jim", 29 ) ); bob.addBrother( new Person( "larry", 42 ) ); bob.addBrother( new Person( "ted", 22 ) ); System.err.println( xpath.evaluate( bob ) ); } }
... package org.jaxen.javabean; import junit.framework.TestCase; import org.jaxen.saxpath.helpers.XPathReaderFactory; public class DocumentNavigatorTest extends TestCase { protected void setUp() throws Exception { System.setProperty( XPathReaderFactory.DRIVER_PROPERTY, "" ); } public void testNothing() throws Exception ...
3c9b49ef968c7e59028eb0bda78b1474a49339f3
numscons/tools/intel_common/common.py
numscons/tools/intel_common/common.py
_ARG2ABI = {'x86': 'ia32', 'amd64': 'em64t', 'default': 'ia32'} def get_abi(env): try: abi = env['ICC_ABI'] except KeyError: abi = 'default' try: return _ARG2ABI[abi] except KeyError: ValueError("Unknown abi %s" % abi)
_ARG2ABI = {'x86': 'ia32', 'amd64': 'em64t', 'default': 'ia32'} def get_abi(env, lang='C'): if lang == 'C' or lang == 'CXX': try: abi = env['ICC_ABI'] except KeyError: abi = 'default' elif lang == 'FORTRAN': try: abi = env['IFORT_ABI'] except KeyError: abi = 'default' try: return _ARG2ABI[abi] except KeyError: ValueError("Unknown abi %s" % abi)
Add a language argument to get abi for intel tools.
Add a language argument to get abi for intel tools.
Python
bsd-3-clause
cournape/numscons,cournape/numscons,cournape/numscons
python
## Code Before: _ARG2ABI = {'x86': 'ia32', 'amd64': 'em64t', 'default': 'ia32'} def get_abi(env): try: abi = env['ICC_ABI'] except KeyError: abi = 'default' try: return _ARG2ABI[abi] except KeyError: ValueError("Unknown abi %s" % abi) ## Instruction: Add a language argument to get abi for intel tools. ## Code After: _ARG2ABI = {'x86': 'ia32', 'amd64': 'em64t', 'default': 'ia32'} def get_abi(env, lang='C'): if lang == 'C' or lang == 'CXX': try: abi = env['ICC_ABI'] except KeyError: abi = 'default' elif lang == 'FORTRAN': try: abi = env['IFORT_ABI'] except KeyError: abi = 'default' try: return _ARG2ABI[abi] except KeyError: ValueError("Unknown abi %s" % abi)
... _ARG2ABI = {'x86': 'ia32', 'amd64': 'em64t', 'default': 'ia32'} def get_abi(env, lang='C'): if lang == 'C' or lang == 'CXX': try: abi = env['ICC_ABI'] except KeyError: abi = 'default' elif lang == 'FORTRAN': try: abi = env['IFORT_ABI'] except KeyError: abi = 'default' try: return _ARG2ABI[abi] ...
58120c937e04357f6fbdcf1431f69fe7a38aacb2
app/mod_budget/model.py
app/mod_budget/model.py
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class CategoryBudget(db.Document): # The amount of the budget. amount = db.DecimalField(precision = 2, required = True) # The category. category = db.ReferenceField(Category, required = True) def sumEntries(): return sum([entry.amount for entry in Entry.objects if entry.amount > 0])
from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Income(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class Expense(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the expense. description = db.StringField(required = True) # The owner of the expense. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class CategoryBudget(db.Document): # The amount of the budget. amount = db.DecimalField(precision = 2, required = True) # The category. category = db.ReferenceField(Category, required = True) def sumEntries(): return sum([entry.amount for entry in Entry.objects if entry.amount > 0])
Split Entry into Income and Expense schemes
Split Entry into Income and Expense schemes Splitting the Entry schema into two seperate schemes allows us to use different collections to store them, which in turn makes our work easier later on.
Python
mit
Zillolo/mana-vault,Zillolo/mana-vault,Zillolo/mana-vault
python
## Code Before: from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Entry(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class CategoryBudget(db.Document): # The amount of the budget. amount = db.DecimalField(precision = 2, required = True) # The category. category = db.ReferenceField(Category, required = True) def sumEntries(): return sum([entry.amount for entry in Entry.objects if entry.amount > 0]) ## Instruction: Split Entry into Income and Expense schemes Splitting the Entry schema into two seperate schemes allows us to use different collections to store them, which in turn makes our work easier later on. ## Code After: from app import db from app.mod_auth.model import User class Category(db.Document): # The name of the category. name = db.StringField(required = True) class Income(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the entry. description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class Expense(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the expense. description = db.StringField(required = True) # The owner of the expense. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class CategoryBudget(db.Document): # The amount of the budget. amount = db.DecimalField(precision = 2, required = True) # The category. category = db.ReferenceField(Category, required = True) def sumEntries(): return sum([entry.amount for entry in Entry.objects if entry.amount > 0])
... # The name of the category. name = db.StringField(required = True) class Income(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) ... description = db.StringField(required = True) # The owner of the entry. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) # The category of this entry. category = db.ReferenceField(Category, required = True) class Expense(db.Document): # The amount of the entry. amount = db.DecimalField(precision = 2, required = True) # A short description for the expense. description = db.StringField(required = True) # The owner of the expense. # Should the owner be deleted, we also want to delete all of his entries. owner = db.ReferenceField(User, reverse_delete_rule = db.CASCADE, required = True) ...
281208f9ecfa3f5f5028df75fff86f1cdb752487
jasylibrary.py
jasylibrary.py
import sys, os.path, inspect filename = inspect.getframeinfo(inspect.currentframe()).filename path = os.path.dirname(os.path.abspath(filename)) sys.path.append(path) import konstrukteur.Konstrukteur @share def build(regenerate = False): """ Build static website """ konstrukteur.Konstrukteur.build(regenerate)
import sys, os.path, inspect filename = inspect.getframeinfo(inspect.currentframe()).filename path = os.path.dirname(os.path.abspath(filename)) sys.path.append(path) import konstrukteur.Konstrukteur import jasy.asset.Manager @share def build(profile, regenerate = False): """ Build static website """ def getPartUrl(part, type): folder = "" if type == "css": folder = profile.getCssFolder() outputPath = folder #os.path.join(profile.getDestinationPath(), folder) filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type)) return filename session.addCommand("part.url", getPartUrl, "url") for permutation in profile.permutate(): konstrukteur.Konstrukteur.build(regenerate, profile)
Add support for part loading
Add support for part loading
Python
mit
fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur
python
## Code Before: import sys, os.path, inspect filename = inspect.getframeinfo(inspect.currentframe()).filename path = os.path.dirname(os.path.abspath(filename)) sys.path.append(path) import konstrukteur.Konstrukteur @share def build(regenerate = False): """ Build static website """ konstrukteur.Konstrukteur.build(regenerate) ## Instruction: Add support for part loading ## Code After: import sys, os.path, inspect filename = inspect.getframeinfo(inspect.currentframe()).filename path = os.path.dirname(os.path.abspath(filename)) sys.path.append(path) import konstrukteur.Konstrukteur import jasy.asset.Manager @share def build(profile, regenerate = False): """ Build static website """ def getPartUrl(part, type): folder = "" if type == "css": folder = profile.getCssFolder() outputPath = folder #os.path.join(profile.getDestinationPath(), folder) filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type)) return filename session.addCommand("part.url", getPartUrl, "url") for permutation in profile.permutate(): konstrukteur.Konstrukteur.build(regenerate, profile)
# ... existing code ... sys.path.append(path) import konstrukteur.Konstrukteur import jasy.asset.Manager @share def build(profile, regenerate = False): """ Build static website """ def getPartUrl(part, type): folder = "" if type == "css": folder = profile.getCssFolder() outputPath = folder #os.path.join(profile.getDestinationPath(), folder) filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type)) return filename session.addCommand("part.url", getPartUrl, "url") for permutation in profile.permutate(): konstrukteur.Konstrukteur.build(regenerate, profile) # ... rest of the code ...
96776b33fcf2ebbd2de4748c38d8be9c63fdec71
tests/conftest.py
tests/conftest.py
from django.conf import settings from django.core.management import call_command def pytest_configure(): settings.configure( ROOT_URLCONF='tests.urls', ALLOWED_HOSTS=['testserver'], DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test' } }, INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'rest_framework', 'tests', ] ) try: from django import setup except ImportError: call_command('syncdb', '--noinput') else: setup() call_command('migrate')
from django.conf import settings from django.core.management import call_command def pytest_configure(): settings.configure( ROOT_URLCONF='tests.urls', ALLOWED_HOSTS=['testserver'], DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test' } }, INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'rest_framework', 'tests', ] ) try: from django import setup except ImportError: call_command('syncdb', interactive=False) else: setup() call_command('migrate')
Replace --noinput with interactive=False in syncdb command call
Replace --noinput with interactive=False in syncdb command call
Python
bsd-2-clause
yprez/django-rest-assured,pombredanne/django-rest-assured,ydaniv/django-rest-assured
python
## Code Before: from django.conf import settings from django.core.management import call_command def pytest_configure(): settings.configure( ROOT_URLCONF='tests.urls', ALLOWED_HOSTS=['testserver'], DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test' } }, INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'rest_framework', 'tests', ] ) try: from django import setup except ImportError: call_command('syncdb', '--noinput') else: setup() call_command('migrate') ## Instruction: Replace --noinput with interactive=False in syncdb command call ## Code After: from django.conf import settings from django.core.management import call_command def pytest_configure(): settings.configure( ROOT_URLCONF='tests.urls', ALLOWED_HOSTS=['testserver'], DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'test' } }, INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'rest_framework', 'tests', ] ) try: from django import setup except ImportError: call_command('syncdb', interactive=False) else: setup() call_command('migrate')
# ... existing code ... try: from django import setup except ImportError: call_command('syncdb', interactive=False) else: setup() call_command('migrate') # ... rest of the code ...
c72f021248eaf2b969967eb8663e72f888c5fba7
admin/preprints/serializers.py
admin/preprints/serializers.py
from website.project.taxonomies import Subject from admin.nodes.serializers import serialize_node def serialize_preprint(preprint): return { 'id': preprint._id, 'date_created': preprint.date_created, 'modified': preprint.date_modified, 'provider': preprint.provider, 'node': serialize_node(preprint.node), 'is_published': preprint.is_published, 'date_published': preprint.date_published, 'subjects': serialize_subjects(preprint.subjects), } def serialize_subjects(subjects): serialized_subjects = [] for subject in subjects: subject = Subject.load(subject[0]) serialized_subjects.append({ 'id': subject._id, 'text': subject.text }) return serialized_subjects
from website.project.taxonomies import Subject from admin.nodes.serializers import serialize_node def serialize_preprint(preprint): return { 'id': preprint._id, 'date_created': preprint.date_created, 'modified': preprint.date_modified, 'provider': preprint.provider, 'node': serialize_node(preprint.node), 'is_published': preprint.is_published, 'date_published': preprint.date_published, 'subjects': serialize_subjects(preprint.subjects), } def serialize_subjects(subjects): serialized_subjects = [] for subject in subjects: if len(subject) == 1: subject = Subject.load(subject[0]) if subject: serialized_subjects.append({ 'id': subject._id, 'text': subject.text }) return serialized_subjects
Add a bit of subject error handling just in case
Add a bit of subject error handling just in case
Python
apache-2.0
cslzchen/osf.io,Johnetordoff/osf.io,hmoco/osf.io,adlius/osf.io,pattisdr/osf.io,icereval/osf.io,cslzchen/osf.io,chrisseto/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,cslzchen/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,adlius/osf.io,HalcyonChimera/osf.io,felliott/osf.io,saradbowman/osf.io,erinspace/osf.io,mattclark/osf.io,caneruguz/osf.io,baylee-d/osf.io,felliott/osf.io,pattisdr/osf.io,chennan47/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,leb2dg/osf.io,baylee-d/osf.io,mfraezz/osf.io,mfraezz/osf.io,pattisdr/osf.io,erinspace/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,aaxelb/osf.io,chrisseto/osf.io,Nesiehr/osf.io,caneruguz/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,mattclark/osf.io,chrisseto/osf.io,felliott/osf.io,Johnetordoff/osf.io,binoculars/osf.io,leb2dg/osf.io,laurenrevere/osf.io,hmoco/osf.io,sloria/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,sloria/osf.io,TomBaxter/osf.io,adlius/osf.io,caseyrollins/osf.io,erinspace/osf.io,binoculars/osf.io,caseyrollins/osf.io,icereval/osf.io,sloria/osf.io,aaxelb/osf.io,aaxelb/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,crcresearch/osf.io,aaxelb/osf.io,hmoco/osf.io,mattclark/osf.io,caneruguz/osf.io,icereval/osf.io,leb2dg/osf.io,felliott/osf.io,baylee-d/osf.io,adlius/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io
python
## Code Before: from website.project.taxonomies import Subject from admin.nodes.serializers import serialize_node def serialize_preprint(preprint): return { 'id': preprint._id, 'date_created': preprint.date_created, 'modified': preprint.date_modified, 'provider': preprint.provider, 'node': serialize_node(preprint.node), 'is_published': preprint.is_published, 'date_published': preprint.date_published, 'subjects': serialize_subjects(preprint.subjects), } def serialize_subjects(subjects): serialized_subjects = [] for subject in subjects: subject = Subject.load(subject[0]) serialized_subjects.append({ 'id': subject._id, 'text': subject.text }) return serialized_subjects ## Instruction: Add a bit of subject error handling just in case ## Code After: from website.project.taxonomies import Subject from admin.nodes.serializers import serialize_node def serialize_preprint(preprint): return { 'id': preprint._id, 'date_created': preprint.date_created, 'modified': preprint.date_modified, 'provider': preprint.provider, 'node': serialize_node(preprint.node), 'is_published': preprint.is_published, 'date_published': preprint.date_published, 'subjects': serialize_subjects(preprint.subjects), } def serialize_subjects(subjects): serialized_subjects = [] for subject in subjects: if len(subject) == 1: subject = Subject.load(subject[0]) if subject: serialized_subjects.append({ 'id': subject._id, 'text': subject.text }) return serialized_subjects
# ... existing code ... def serialize_subjects(subjects): serialized_subjects = [] for subject in subjects: if len(subject) == 1: subject = Subject.load(subject[0]) if subject: serialized_subjects.append({ 'id': subject._id, 'text': subject.text }) return serialized_subjects # ... rest of the code ...
b103c02815a7819e9cb4f1cc0061202cfcfd0fa6
bidb/api/views.py
bidb/api/views.py
from django.conf import settings from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from .utils import parse_submission, InvalidSubmission @csrf_exempt @require_http_methods(['PUT']) def submit(request): try: submission, created = parse_submission(request) except InvalidSubmission as exc: return HttpResponseBadRequest("{}\n".format(exc)) return HttpResponse('{}{}\n'.format( settings.SITE_URL, submission.buildinfo.get_absolute_url(), ), status=201 if created else 200)
from django.conf import settings from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from .utils import parse_submission, InvalidSubmission @csrf_exempt @require_http_methods(['PUT']) def submit(request): try: submission, created = parse_submission(request) except InvalidSubmission as exc: return HttpResponseBadRequest("Rejecting submission: {}\n".format(exc)) return HttpResponse('{}{}\n'.format( settings.SITE_URL, submission.buildinfo.get_absolute_url(), ), status=201 if created else 200)
Make it clearer that we are rejecting the submission.
Make it clearer that we are rejecting the submission.
Python
agpl-3.0
lamby/buildinfo.debian.net,lamby/buildinfo.debian.net
python
## Code Before: from django.conf import settings from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from .utils import parse_submission, InvalidSubmission @csrf_exempt @require_http_methods(['PUT']) def submit(request): try: submission, created = parse_submission(request) except InvalidSubmission as exc: return HttpResponseBadRequest("{}\n".format(exc)) return HttpResponse('{}{}\n'.format( settings.SITE_URL, submission.buildinfo.get_absolute_url(), ), status=201 if created else 200) ## Instruction: Make it clearer that we are rejecting the submission. ## Code After: from django.conf import settings from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from .utils import parse_submission, InvalidSubmission @csrf_exempt @require_http_methods(['PUT']) def submit(request): try: submission, created = parse_submission(request) except InvalidSubmission as exc: return HttpResponseBadRequest("Rejecting submission: {}\n".format(exc)) return HttpResponse('{}{}\n'.format( settings.SITE_URL, submission.buildinfo.get_absolute_url(), ), status=201 if created else 200)
// ... existing code ... try: submission, created = parse_submission(request) except InvalidSubmission as exc: return HttpResponseBadRequest("Rejecting submission: {}\n".format(exc)) return HttpResponse('{}{}\n'.format( settings.SITE_URL, // ... rest of the code ...
1ba14774b1ed483f512562ab83f91fab8b843db7
nazs/web/core/blocks.py
nazs/web/core/blocks.py
from django.utils.translation import ugettext as _ from achilles import blocks, tables import nazs register = blocks.Library('core') @register.block(template_name='web/core/welcome.html') def home(): return {'version': nazs.__version__} @register.block(template_name='web/core/apply_button.html') def apply_button(): return {'active': nazs.changed()} @register.block('modules') class Modules(tables.Table): id_field = 'name' # Module name name = tables.Column(verbose_name=_('Module')) # Module status status = tables.MergeColumn( verbose_name=_('Status'), columns=( ('install', tables.ActionColumn(verbose_name=_('Install'), action='core:install_module', visible=lambda m: not m.installed)), ('enable', tables.ActionColumn(verbose_name=_('Enable'), action='core:enable_module', visible=lambda m: m.installed and not m.enabled)), ('disable', tables.ActionColumn(verbose_name=_('Disable'), action='core:disable_module', visible=lambda m: m.installed and m.enabled)), ) ) def objects(self): return nazs.modules() def get_object(self, name): for module in nazs.modules(): if module.name == name: return module raise KeyError('Module %s not found' % name)
from django.utils.translation import ugettext as _ from achilles import blocks, tables import nazs register = blocks.Library('core') @register.block(template_name='web/core/welcome.html') def home(): return {'version': nazs.__version__} @register.block(template_name='web/core/apply_button.html') def apply_button(): return {'active': nazs.changed()} @register.block('modules') class Modules(tables.Table): id_field = 'name' # Module name name = tables.Column(verbose_name=_('Module')) # Module status status = tables.MergeColumn( verbose_name=_('Status'), columns=( ('install', tables.ActionColumn(verbose_name=_('Install'), action='core:install_module', classes='btn btn-sm btn-primary', visible=lambda m: not m.installed)), ('enable', tables.ActionColumn(verbose_name=_('Enable'), action='core:enable_module', classes='btn btn-sm btn-success', visible=lambda m: m.installed and not m.enabled)), ('disable', tables.ActionColumn(verbose_name=_('Disable'), action='core:disable_module', classes='btn btn-sm btn-danger', visible=lambda m: m.installed and m.enabled)), ) ) def objects(self): return nazs.modules() def get_object(self, name): for module in nazs.modules(): if module.name == name: return module raise KeyError('Module %s not found' % name)
Add proper css classes to action buttons
Add proper css classes to action buttons
Python
agpl-3.0
exekias/droplet,exekias/droplet,exekias/droplet
python
## Code Before: from django.utils.translation import ugettext as _ from achilles import blocks, tables import nazs register = blocks.Library('core') @register.block(template_name='web/core/welcome.html') def home(): return {'version': nazs.__version__} @register.block(template_name='web/core/apply_button.html') def apply_button(): return {'active': nazs.changed()} @register.block('modules') class Modules(tables.Table): id_field = 'name' # Module name name = tables.Column(verbose_name=_('Module')) # Module status status = tables.MergeColumn( verbose_name=_('Status'), columns=( ('install', tables.ActionColumn(verbose_name=_('Install'), action='core:install_module', visible=lambda m: not m.installed)), ('enable', tables.ActionColumn(verbose_name=_('Enable'), action='core:enable_module', visible=lambda m: m.installed and not m.enabled)), ('disable', tables.ActionColumn(verbose_name=_('Disable'), action='core:disable_module', visible=lambda m: m.installed and m.enabled)), ) ) def objects(self): return nazs.modules() def get_object(self, name): for module in nazs.modules(): if module.name == name: return module raise KeyError('Module %s not found' % name) ## Instruction: Add proper css classes to action buttons ## Code After: from django.utils.translation import ugettext as _ from achilles import blocks, tables import nazs register = blocks.Library('core') @register.block(template_name='web/core/welcome.html') def home(): return {'version': nazs.__version__} @register.block(template_name='web/core/apply_button.html') def apply_button(): return {'active': nazs.changed()} @register.block('modules') class Modules(tables.Table): id_field = 'name' # Module name name = tables.Column(verbose_name=_('Module')) # Module status status = tables.MergeColumn( verbose_name=_('Status'), columns=( ('install', tables.ActionColumn(verbose_name=_('Install'), action='core:install_module', classes='btn btn-sm btn-primary', visible=lambda m: not m.installed)), ('enable', tables.ActionColumn(verbose_name=_('Enable'), action='core:enable_module', classes='btn btn-sm btn-success', visible=lambda m: m.installed and not m.enabled)), ('disable', tables.ActionColumn(verbose_name=_('Disable'), action='core:disable_module', classes='btn btn-sm btn-danger', visible=lambda m: m.installed and m.enabled)), ) ) def objects(self): return nazs.modules() def get_object(self, name): for module in nazs.modules(): if module.name == name: return module raise KeyError('Module %s not found' % name)
... columns=( ('install', tables.ActionColumn(verbose_name=_('Install'), action='core:install_module', classes='btn btn-sm btn-primary', visible=lambda m: not m.installed)), ('enable', tables.ActionColumn(verbose_name=_('Enable'), action='core:enable_module', classes='btn btn-sm btn-success', visible=lambda m: m.installed and not m.enabled)), ('disable', tables.ActionColumn(verbose_name=_('Disable'), action='core:disable_module', classes='btn btn-sm btn-danger', visible=lambda m: m.installed and m.enabled)), ) ...
3b75a6f3654e8f325060779ca56b6df93fe0cabe
genome_designer/main/demo_view_overrides.py
genome_designer/main/demo_view_overrides.py
from django.contrib.auth import authenticate from django.contrib.auth import login from django.http import HttpResponseRedirect def login_demo_account(request): new_user = authenticate(username='gmcdev', password='g3n3d3z') login(request, new_user) return HttpResponseRedirect("/")
from django.contrib.auth import authenticate from django.contrib.auth import login from django.http import HttpResponseRedirect def login_demo_account(request): new_user = authenticate(username='gmcdev', password='g3n3d3z') login(request, new_user) redirect_url = request.GET.get('next', '/') return HttpResponseRedirect(redirect_url)
Handle redirect_url in demo login bypass.
Handle redirect_url in demo login bypass.
Python
mit
churchlab/millstone,churchlab/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,woodymit/millstone,churchlab/millstone
python
## Code Before: from django.contrib.auth import authenticate from django.contrib.auth import login from django.http import HttpResponseRedirect def login_demo_account(request): new_user = authenticate(username='gmcdev', password='g3n3d3z') login(request, new_user) return HttpResponseRedirect("/") ## Instruction: Handle redirect_url in demo login bypass. ## Code After: from django.contrib.auth import authenticate from django.contrib.auth import login from django.http import HttpResponseRedirect def login_demo_account(request): new_user = authenticate(username='gmcdev', password='g3n3d3z') login(request, new_user) redirect_url = request.GET.get('next', '/') return HttpResponseRedirect(redirect_url)
# ... existing code ... new_user = authenticate(username='gmcdev', password='g3n3d3z') login(request, new_user) redirect_url = request.GET.get('next', '/') return HttpResponseRedirect(redirect_url) # ... rest of the code ...
458fd49fdf73f5cc338c58b1e741fde42f2f7251
exampleapp/models.py
exampleapp/models.py
from galleries.models import Gallery, ImageModel from django.db import models from imagekit.models import ImageSpec from imagekit.processors.resize import Fit class Photo(ImageModel): thumbnail = ImageSpec([Fit(50, 50)]) full = ImageSpec([Fit(400, 200)]) caption = models.CharField(max_length=100) class PortfolioImage(ImageModel): thumbnail = ImageSpec([Fit(70, 40)]) class Video(models.Model): title = models.CharField(max_length=50) video = models.FileField(upload_to='galleries/video/video') thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True) def __unicode__(self): return self.title class Meta: ordering = ['title'] class PhotoAlbum(Gallery): class GalleryMeta: member_models = [Photo] class Meta: verbose_name = 'Photo Album' class Portfolio(Gallery): class GalleryMeta: member_models = [Video] membership_class = 'PortfolioMembership' class PortfolioMembership(Portfolio.BaseMembership): extra_field = models.CharField(max_length=10)
from galleries.models import Gallery, ImageModel from django.db import models from imagekit.models import ImageSpec from imagekit.processors import ResizeToFit class Photo(ImageModel): thumbnail = ImageSpec([ResizeToFit(50, 50)]) full = ImageSpec([ResizeToFit(400, 200)]) caption = models.CharField(max_length=100) class PortfolioImage(ImageModel): thumbnail = ImageSpec([ResizeToFit(70, 40)]) class Video(models.Model): title = models.CharField(max_length=50) video = models.FileField(upload_to='galleries/video/video') thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True) def __unicode__(self): return self.title class Meta: ordering = ['title'] class PhotoAlbum(Gallery): class GalleryMeta: member_models = [Photo] class Meta: verbose_name = 'Photo Album' class Portfolio(Gallery): class GalleryMeta: member_models = [Video] membership_class = 'PortfolioMembership' class PortfolioMembership(Portfolio.BaseMembership): extra_field = models.CharField(max_length=10)
Use (not so) new processor class names
Use (not so) new processor class names
Python
mit
hzdg/django-galleries,hzdg/django-galleries,hzdg/django-galleries
python
## Code Before: from galleries.models import Gallery, ImageModel from django.db import models from imagekit.models import ImageSpec from imagekit.processors.resize import Fit class Photo(ImageModel): thumbnail = ImageSpec([Fit(50, 50)]) full = ImageSpec([Fit(400, 200)]) caption = models.CharField(max_length=100) class PortfolioImage(ImageModel): thumbnail = ImageSpec([Fit(70, 40)]) class Video(models.Model): title = models.CharField(max_length=50) video = models.FileField(upload_to='galleries/video/video') thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True) def __unicode__(self): return self.title class Meta: ordering = ['title'] class PhotoAlbum(Gallery): class GalleryMeta: member_models = [Photo] class Meta: verbose_name = 'Photo Album' class Portfolio(Gallery): class GalleryMeta: member_models = [Video] membership_class = 'PortfolioMembership' class PortfolioMembership(Portfolio.BaseMembership): extra_field = models.CharField(max_length=10) ## Instruction: Use (not so) new processor class names ## Code After: from galleries.models import Gallery, ImageModel from django.db import models from imagekit.models import ImageSpec from imagekit.processors import ResizeToFit class Photo(ImageModel): thumbnail = ImageSpec([ResizeToFit(50, 50)]) full = ImageSpec([ResizeToFit(400, 200)]) caption = models.CharField(max_length=100) class PortfolioImage(ImageModel): thumbnail = ImageSpec([ResizeToFit(70, 40)]) class Video(models.Model): title = models.CharField(max_length=50) video = models.FileField(upload_to='galleries/video/video') thumbnail = models.ImageField(upload_to='galleries/video/thumbnail', blank=True) def __unicode__(self): return self.title class Meta: ordering = ['title'] class PhotoAlbum(Gallery): class GalleryMeta: member_models = [Photo] class Meta: verbose_name = 'Photo Album' class Portfolio(Gallery): class GalleryMeta: member_models = [Video] membership_class = 'PortfolioMembership' class PortfolioMembership(Portfolio.BaseMembership): extra_field = models.CharField(max_length=10)
// ... existing code ... from galleries.models import Gallery, ImageModel from django.db import models from imagekit.models import ImageSpec from imagekit.processors import ResizeToFit class Photo(ImageModel): thumbnail = ImageSpec([ResizeToFit(50, 50)]) full = ImageSpec([ResizeToFit(400, 200)]) caption = models.CharField(max_length=100) class PortfolioImage(ImageModel): thumbnail = ImageSpec([ResizeToFit(70, 40)]) class Video(models.Model): // ... rest of the code ...
87153cb1a9727d17d31f3aabb28affddca3191bf
sqltocpp.py
sqltocpp.py
import click from sqltocpp import convert @click.command() @click.option('--sql', help='schema file name') @click.option('--target', default='schema.hpp', help='hpp file name') def execute(sql, target): convert.schema_to_struct(sql, target) if __name__ == '__main__': try: execute() except: execute("--help")
import click from sqltocpp import convert @click.command() @click.argument('sql_schema_file') @click.option('--target', default='schema.hpp', help='hpp file name') def execute(sql_schema_file, target): convert.schema_to_struct(sql_schema_file, target) if __name__ == '__main__': execute()
Add click based commandline interface script
Add click based commandline interface script sqltocpp is intended to be a CLI command. This enables it to be so
Python
mit
banjocat/SqlToCpp,banjocat/SqlToCpp
python
## Code Before: import click from sqltocpp import convert @click.command() @click.option('--sql', help='schema file name') @click.option('--target', default='schema.hpp', help='hpp file name') def execute(sql, target): convert.schema_to_struct(sql, target) if __name__ == '__main__': try: execute() except: execute("--help") ## Instruction: Add click based commandline interface script sqltocpp is intended to be a CLI command. This enables it to be so ## Code After: import click from sqltocpp import convert @click.command() @click.argument('sql_schema_file') @click.option('--target', default='schema.hpp', help='hpp file name') def execute(sql_schema_file, target): convert.schema_to_struct(sql_schema_file, target) if __name__ == '__main__': execute()
# ... existing code ... @click.command() @click.argument('sql_schema_file') @click.option('--target', default='schema.hpp', help='hpp file name') def execute(sql_schema_file, target): convert.schema_to_struct(sql_schema_file, target) if __name__ == '__main__': execute() # ... rest of the code ...
8aceb4bcfeef05874bbd6eec66eeb7b69f20f02e
pinax/blog/templatetags/pinax_blog_tags.py
pinax/blog/templatetags/pinax_blog_tags.py
from django import template from ..models import Post, Section register = template.Library() @register.assignment_tag def latest_blog_posts(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(scoper=scoper) return qs[:5] @register.assignment_tag def latest_blog_post(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(scoper=scoper) return qs[0] @register.assignment_tag def latest_section_post(section, scoper=None): qs = Post.objects.published().filter(section__name=section).order_by("-published") if scoper: qs = qs.filter(scoper=scoper) return qs[0] if qs.count() > 0 else None @register.assignment_tag def blog_sections(): return Section.objects.filter(enabled=True)
from django import template from ..models import Post, Section register = template.Library() @register.assignment_tag def latest_blog_posts(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(blog__scoper=scoper) return qs[:5] @register.assignment_tag def latest_blog_post(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(blog__scoper=scoper) return qs[0] @register.assignment_tag def latest_section_post(section, scoper=None): qs = Post.objects.published().filter(section__name=section).order_by("-published") if scoper: qs = qs.filter(blog__scoper=scoper) return qs[0] if qs.count() > 0 else None @register.assignment_tag def blog_sections(): return Section.objects.filter(enabled=True)
Fix small bug in templatetags
Fix small bug in templatetags
Python
mit
pinax/pinax-blog,pinax/pinax-blog,pinax/pinax-blog
python
## Code Before: from django import template from ..models import Post, Section register = template.Library() @register.assignment_tag def latest_blog_posts(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(scoper=scoper) return qs[:5] @register.assignment_tag def latest_blog_post(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(scoper=scoper) return qs[0] @register.assignment_tag def latest_section_post(section, scoper=None): qs = Post.objects.published().filter(section__name=section).order_by("-published") if scoper: qs = qs.filter(scoper=scoper) return qs[0] if qs.count() > 0 else None @register.assignment_tag def blog_sections(): return Section.objects.filter(enabled=True) ## Instruction: Fix small bug in templatetags ## Code After: from django import template from ..models import Post, Section register = template.Library() @register.assignment_tag def latest_blog_posts(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(blog__scoper=scoper) return qs[:5] @register.assignment_tag def latest_blog_post(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(blog__scoper=scoper) return qs[0] @register.assignment_tag def latest_section_post(section, scoper=None): qs = Post.objects.published().filter(section__name=section).order_by("-published") if scoper: qs = qs.filter(blog__scoper=scoper) return qs[0] if qs.count() > 0 else None @register.assignment_tag def blog_sections(): return Section.objects.filter(enabled=True)
... def latest_blog_posts(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(blog__scoper=scoper) return qs[:5] ... def latest_blog_post(scoper=None): qs = Post.objects.current() if scoper: qs = qs.filter(blog__scoper=scoper) return qs[0] ... def latest_section_post(section, scoper=None): qs = Post.objects.published().filter(section__name=section).order_by("-published") if scoper: qs = qs.filter(blog__scoper=scoper) return qs[0] if qs.count() > 0 else None ...
c4b7532987958573dafe01621cdd254db63bf8ea
bfg9000/builtins/hooks.py
bfg9000/builtins/hooks.py
import functools from six import iteritems _all_builtins = {} class _Binder(object): def __init__(self, args, fn): self._args = args self._fn = fn class _FunctionBinder(_Binder): def bind(self, **kwargs): # XXX: partial doesn't forward the docstring of the function. return functools.partial(self._fn, *[kwargs[i] for i in self._args]) class _VariableBinder(_Binder): def bind(self, **kwargs): return self._fn(*[kwargs[i] for i in self._args]) class _BuiltinDecorator(object): def __init__(self, binder): self.__binder = binder def __call__(self, *args): def wrapper(fn): bound = self.__binder(args, fn) _all_builtins[fn.__name__] = bound return bound return wrapper builtin = _BuiltinDecorator(_FunctionBinder)() builtin.globals = _BuiltinDecorator(_FunctionBinder) builtin.variable = _BuiltinDecorator(_VariableBinder) def bind(**kwargs): builtins = {} for k, v in iteritems(_all_builtins): builtins[k] = v.bind(builtins=builtins, **kwargs) return builtins @builtin.variable('env') def env(this_env): return this_env
import functools import inspect import sys from six import iteritems _all_builtins = {} class _Binder(object): def __init__(self, args, fn): self._args = args self._fn = fn class _FunctionBinder(_Binder): def bind(self, **kwargs): pre_args = tuple(kwargs[i] for i in self._args) @functools.wraps(self._fn) def wrapped(*args, **kwargs): return self._fn(*(pre_args + args), **kwargs) if sys.version_info >= (3, 3): sig = inspect.signature(wrapped) params = list(sig.parameters.values())[len(kwargs):] wrapped.__signature__ = inspect.Signature(params) return wrapped class _VariableBinder(_Binder): def bind(self, **kwargs): return self._fn(*[kwargs[i] for i in self._args]) class _BuiltinDecorator(object): def __init__(self, binder): self.__binder = binder def __call__(self, *args): def wrapper(fn): bound = self.__binder(args, fn) _all_builtins[fn.__name__] = bound return bound return wrapper builtin = _BuiltinDecorator(_FunctionBinder)() builtin.globals = _BuiltinDecorator(_FunctionBinder) builtin.variable = _BuiltinDecorator(_VariableBinder) def bind(**kwargs): builtins = {} for k, v in iteritems(_all_builtins): builtins[k] = v.bind(builtins=builtins, **kwargs) return builtins @builtin.variable('env') def env(this_env): return this_env
Change how the wrappers work for builtin functions so that docs get forwarded correctly
Change how the wrappers work for builtin functions so that docs get forwarded correctly
Python
bsd-3-clause
jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000
python
## Code Before: import functools from six import iteritems _all_builtins = {} class _Binder(object): def __init__(self, args, fn): self._args = args self._fn = fn class _FunctionBinder(_Binder): def bind(self, **kwargs): # XXX: partial doesn't forward the docstring of the function. return functools.partial(self._fn, *[kwargs[i] for i in self._args]) class _VariableBinder(_Binder): def bind(self, **kwargs): return self._fn(*[kwargs[i] for i in self._args]) class _BuiltinDecorator(object): def __init__(self, binder): self.__binder = binder def __call__(self, *args): def wrapper(fn): bound = self.__binder(args, fn) _all_builtins[fn.__name__] = bound return bound return wrapper builtin = _BuiltinDecorator(_FunctionBinder)() builtin.globals = _BuiltinDecorator(_FunctionBinder) builtin.variable = _BuiltinDecorator(_VariableBinder) def bind(**kwargs): builtins = {} for k, v in iteritems(_all_builtins): builtins[k] = v.bind(builtins=builtins, **kwargs) return builtins @builtin.variable('env') def env(this_env): return this_env ## Instruction: Change how the wrappers work for builtin functions so that docs get forwarded correctly ## Code After: import functools import inspect import sys from six import iteritems _all_builtins = {} class _Binder(object): def __init__(self, args, fn): self._args = args self._fn = fn class _FunctionBinder(_Binder): def bind(self, **kwargs): pre_args = tuple(kwargs[i] for i in self._args) @functools.wraps(self._fn) def wrapped(*args, **kwargs): return self._fn(*(pre_args + args), **kwargs) if sys.version_info >= (3, 3): sig = inspect.signature(wrapped) params = list(sig.parameters.values())[len(kwargs):] wrapped.__signature__ = inspect.Signature(params) return wrapped class _VariableBinder(_Binder): def bind(self, **kwargs): return self._fn(*[kwargs[i] for i in self._args]) class _BuiltinDecorator(object): def __init__(self, binder): self.__binder = binder def __call__(self, *args): def wrapper(fn): bound = self.__binder(args, fn) _all_builtins[fn.__name__] = bound return bound return wrapper builtin = _BuiltinDecorator(_FunctionBinder)() builtin.globals = _BuiltinDecorator(_FunctionBinder) builtin.variable = _BuiltinDecorator(_VariableBinder) def bind(**kwargs): builtins = {} for k, v in iteritems(_all_builtins): builtins[k] = v.bind(builtins=builtins, **kwargs) return builtins @builtin.variable('env') def env(this_env): return this_env
// ... existing code ... import functools import inspect import sys from six import iteritems _all_builtins = {} // ... modified code ... class _FunctionBinder(_Binder): def bind(self, **kwargs): pre_args = tuple(kwargs[i] for i in self._args) @functools.wraps(self._fn) def wrapped(*args, **kwargs): return self._fn(*(pre_args + args), **kwargs) if sys.version_info >= (3, 3): sig = inspect.signature(wrapped) params = list(sig.parameters.values())[len(kwargs):] wrapped.__signature__ = inspect.Signature(params) return wrapped class _VariableBinder(_Binder): // ... rest of the code ...
aaaf8ef7433418f7a195c79674db56e03fc58f10
apps/bplan/models.py
apps/bplan/models.py
from django.db import models from adhocracy4.models.base import TimeStampedModel from adhocracy4.modules import models as module_models from apps.extprojects.models import ExternalProject class Bplan(ExternalProject): office_worker_email = models.EmailField() class AnonymousItem(TimeStampedModel): module = models.ForeignKey(module_models.Module, on_delete=models.CASCADE) @property def project(self): return self.module.project class Meta: abstract = True class Statement(AnonymousItem): name = models.CharField(max_length=255) email = models.EmailField(blank=True) statement = models.TextField(max_length=17500) street_number = models.CharField(max_length=255) postal_code_city = models.CharField(max_length=255)
from django.contrib.auth.models import AnonymousUser from django.db import models from adhocracy4.models.base import TimeStampedModel from adhocracy4.modules import models as module_models from apps.extprojects.models import ExternalProject class Bplan(ExternalProject): office_worker_email = models.EmailField() class AnonymousItem(TimeStampedModel): module = models.ForeignKey(module_models.Module, on_delete=models.CASCADE) @property def project(self): return self.module.project @property def creator(self): return AnonymousUser() @creator.setter def creator(self, value): pass class Meta: abstract = True class Statement(AnonymousItem): name = models.CharField(max_length=255) email = models.EmailField(blank=True) statement = models.TextField(max_length=17500) street_number = models.CharField(max_length=255) postal_code_city = models.CharField(max_length=255)
Add mockup creator property to AnonymousItems
Add mockup creator property to AnonymousItems
Python
agpl-3.0
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
python
## Code Before: from django.db import models from adhocracy4.models.base import TimeStampedModel from adhocracy4.modules import models as module_models from apps.extprojects.models import ExternalProject class Bplan(ExternalProject): office_worker_email = models.EmailField() class AnonymousItem(TimeStampedModel): module = models.ForeignKey(module_models.Module, on_delete=models.CASCADE) @property def project(self): return self.module.project class Meta: abstract = True class Statement(AnonymousItem): name = models.CharField(max_length=255) email = models.EmailField(blank=True) statement = models.TextField(max_length=17500) street_number = models.CharField(max_length=255) postal_code_city = models.CharField(max_length=255) ## Instruction: Add mockup creator property to AnonymousItems ## Code After: from django.contrib.auth.models import AnonymousUser from django.db import models from adhocracy4.models.base import TimeStampedModel from adhocracy4.modules import models as module_models from apps.extprojects.models import ExternalProject class Bplan(ExternalProject): office_worker_email = models.EmailField() class AnonymousItem(TimeStampedModel): module = models.ForeignKey(module_models.Module, on_delete=models.CASCADE) @property def project(self): return self.module.project @property def creator(self): return AnonymousUser() @creator.setter def creator(self, value): pass class Meta: abstract = True class Statement(AnonymousItem): name = models.CharField(max_length=255) email = models.EmailField(blank=True) statement = models.TextField(max_length=17500) street_number = models.CharField(max_length=255) postal_code_city = models.CharField(max_length=255)
// ... existing code ... from django.contrib.auth.models import AnonymousUser from django.db import models from adhocracy4.models.base import TimeStampedModel // ... modified code ... def project(self): return self.module.project @property def creator(self): return AnonymousUser() @creator.setter def creator(self, value): pass class Meta: abstract = True // ... rest of the code ...
a3d4569b00d282a660a45b3c71c3da49c5a08131
src/test/java/com/hamishrickerby/http_server/DirectoryListingResponseTest.java
src/test/java/com/hamishrickerby/http_server/DirectoryListingResponseTest.java
package com.hamishrickerby.http_server; import junit.framework.TestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Created by rickerbh on 16/08/2016. */ public class DirectoryListingResponseTest extends TestCase { List<String> listing = new ArrayList<String>(Arrays.asList("test.html", "ihniwid.jpg")); public void testEnsureDirectoryListingTextMatchesFiles() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("/")); assertTrue(responseText.contains(listing.get(0))); assertTrue(responseText.contains(listing.get(1))); } private String getDirectoryListing() { DirectoryListingResponse response = new DirectoryListingResponse(new Request("GET / HTTP/1.1")); response.setRootPath("./src/test/resources"); return new String(response.body()); } public void testEnsureDirectoryListingHasHTMLLinks() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("<a href=\"/" + listing.get(0) + "\">" + listing.get(0) + "</a>")); assertTrue(responseText.contains("<a href=\"/" + listing.get(1) + "\">" + listing.get(1) + "</a>")); } }
package com.hamishrickerby.http_server; import junit.framework.TestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Created by rickerbh on 16/08/2016. */ public class DirectoryListingResponseTest extends TestCase { List<String> listing = new ArrayList<String>(Arrays.asList("test.html", "ihniwid.jpg")); public void testEnsureDirectoryListingTextMatchesFiles() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("/")); assertTrue(responseText.contains(listing.get(0))); assertTrue(responseText.contains(listing.get(1))); } private String getDirectoryListingForPath(String location) { DirectoryListingResponse response = new DirectoryListingResponse(new Request("GET " + location + " HTTP/1.1")); response.setRootPath("./src/test/resources"); return new String(response.body()); } private String getDirectoryListing() { return getDirectoryListingForPath("/"); } public void testEnsureDirectoryListingHasHTMLLinks() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("<a href=\"/" + listing.get(0) + "\">" + listing.get(0) + "</a>")); assertTrue(responseText.contains("<a href=\"/" + listing.get(1) + "\">" + listing.get(1) + "</a>")); } }
Make directory listing test helper more generic
Make directory listing test helper more generic
Java
mit
rickerbh/http_server_java,rickerbh/http_server_java
java
## Code Before: package com.hamishrickerby.http_server; import junit.framework.TestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Created by rickerbh on 16/08/2016. */ public class DirectoryListingResponseTest extends TestCase { List<String> listing = new ArrayList<String>(Arrays.asList("test.html", "ihniwid.jpg")); public void testEnsureDirectoryListingTextMatchesFiles() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("/")); assertTrue(responseText.contains(listing.get(0))); assertTrue(responseText.contains(listing.get(1))); } private String getDirectoryListing() { DirectoryListingResponse response = new DirectoryListingResponse(new Request("GET / HTTP/1.1")); response.setRootPath("./src/test/resources"); return new String(response.body()); } public void testEnsureDirectoryListingHasHTMLLinks() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("<a href=\"/" + listing.get(0) + "\">" + listing.get(0) + "</a>")); assertTrue(responseText.contains("<a href=\"/" + listing.get(1) + "\">" + listing.get(1) + "</a>")); } } ## Instruction: Make directory listing test helper more generic ## Code After: package com.hamishrickerby.http_server; import junit.framework.TestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Created by rickerbh on 16/08/2016. */ public class DirectoryListingResponseTest extends TestCase { List<String> listing = new ArrayList<String>(Arrays.asList("test.html", "ihniwid.jpg")); public void testEnsureDirectoryListingTextMatchesFiles() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("/")); assertTrue(responseText.contains(listing.get(0))); assertTrue(responseText.contains(listing.get(1))); } private String getDirectoryListingForPath(String location) { DirectoryListingResponse response = new DirectoryListingResponse(new Request("GET " + location + " HTTP/1.1")); response.setRootPath("./src/test/resources"); return new String(response.body()); } private String getDirectoryListing() { return getDirectoryListingForPath("/"); } public void testEnsureDirectoryListingHasHTMLLinks() { String responseText = getDirectoryListing(); assertTrue(responseText.contains("<a href=\"/" + listing.get(0) + "\">" + listing.get(0) + "</a>")); assertTrue(responseText.contains("<a href=\"/" + listing.get(1) + "\">" + listing.get(1) + "</a>")); } }
... assertTrue(responseText.contains(listing.get(1))); } private String getDirectoryListingForPath(String location) { DirectoryListingResponse response = new DirectoryListingResponse(new Request("GET " + location + " HTTP/1.1")); response.setRootPath("./src/test/resources"); return new String(response.body()); } private String getDirectoryListing() { return getDirectoryListingForPath("/"); } public void testEnsureDirectoryListingHasHTMLLinks() { ...
bd761accdc38b4ed71f94048c3d9ceae05859925
fabfile/tasks/ntp.py
fabfile/tasks/ntp.py
from fabfile.config import * @task @roles('all') def get_all_time(): date = run("DATE=$( sudo date ); DATEMILLISEC=$( sudo date +%s ); echo $DATE; echo $DATEMILLISEC") return tuple(date.split('\r\n')) @task @roles('build') def verify_time_all(): result = execute('get_all_time') print result all_time = [int(date_in_millisec) for date, date_in_millisec in result.values()] all_time.sort() if (all_time[-1] - all_time[0]) > 120: raise RuntimeError("Time not synced in the nodes, Please sync and proceed:\n %s" % result) else: print "Time synced in the nodes, Proceeding to install/provision."
from fabfile.config import * @task @roles('all') def get_all_time(): date = run("DATE=$( sudo date ); DATEMILLISEC=$( sudo date +%s ); echo $DATE; echo $DATEMILLISEC") return tuple(date.split('\r\n')) @task @parallel @roles('build') def verify_time_all(): result = execute('get_all_time') all_time = [] for dates in result.values(): try: (date, date_in_millisec) = dates all_time.append(int(date_in_millisec)) except ValueError: print "ERROR: %s" % dates all_time.sort() if (all_time[-1] - all_time[0]) > 240: raise RuntimeError("Time not synced in the nodes," " Please sync and proceed:\n %s %s %s" % (result, all_time[-1], all_time[0])) else: print "Time synced in the nodes, Proceeding to install/provision."
Increase the time delta from 120 to 240 milli secs to decide the failure.
Increase the time delta from 120 to 240 milli secs to decide the failure. Change-Id: Ic51da36d79d4cd4ccac342d7242e56a23e21c07f
Python
apache-2.0
Juniper/contrail-fabric-utils,Juniper/contrail-fabric-utils
python
## Code Before: from fabfile.config import * @task @roles('all') def get_all_time(): date = run("DATE=$( sudo date ); DATEMILLISEC=$( sudo date +%s ); echo $DATE; echo $DATEMILLISEC") return tuple(date.split('\r\n')) @task @roles('build') def verify_time_all(): result = execute('get_all_time') print result all_time = [int(date_in_millisec) for date, date_in_millisec in result.values()] all_time.sort() if (all_time[-1] - all_time[0]) > 120: raise RuntimeError("Time not synced in the nodes, Please sync and proceed:\n %s" % result) else: print "Time synced in the nodes, Proceeding to install/provision." ## Instruction: Increase the time delta from 120 to 240 milli secs to decide the failure. Change-Id: Ic51da36d79d4cd4ccac342d7242e56a23e21c07f ## Code After: from fabfile.config import * @task @roles('all') def get_all_time(): date = run("DATE=$( sudo date ); DATEMILLISEC=$( sudo date +%s ); echo $DATE; echo $DATEMILLISEC") return tuple(date.split('\r\n')) @task @parallel @roles('build') def verify_time_all(): result = execute('get_all_time') all_time = [] for dates in result.values(): try: (date, date_in_millisec) = dates all_time.append(int(date_in_millisec)) except ValueError: print "ERROR: %s" % dates all_time.sort() if (all_time[-1] - all_time[0]) > 240: raise RuntimeError("Time not synced in the nodes," " Please sync and proceed:\n %s %s %s" % (result, all_time[-1], all_time[0])) else: print "Time synced in the nodes, Proceeding to install/provision."
// ... existing code ... return tuple(date.split('\r\n')) @task @parallel @roles('build') def verify_time_all(): result = execute('get_all_time') all_time = [] for dates in result.values(): try: (date, date_in_millisec) = dates all_time.append(int(date_in_millisec)) except ValueError: print "ERROR: %s" % dates all_time.sort() if (all_time[-1] - all_time[0]) > 240: raise RuntimeError("Time not synced in the nodes," " Please sync and proceed:\n %s %s %s" % (result, all_time[-1], all_time[0])) else: print "Time synced in the nodes, Proceeding to install/provision." // ... rest of the code ...
c96e82caaa3fd560263c54db71772b44e9cd78d7
examples/upgrade_local_charm_k8s.py
examples/upgrade_local_charm_k8s.py
from juju import jasyncio from juju.model import Model async def main(): model = Model() print('Connecting to model') # Connect to current model with current user, per Juju CLI await model.connect() try: print('Deploying bundle') applications = await model.deploy( './examples/k8s-local-bundle/bundle.yaml', ) print('Waiting for active') await model.wait_for_idle(status='active') print("Successfully deployed!") await applications[0].upgrade_charm(path='./examples/charms/onos.charm') await model.wait_for_idle(status='active') print('Removing bundle') for application in applications: await application.remove() finally: print('Disconnecting from model') await model.disconnect() print("Success") if __name__ == '__main__': jasyncio.run(main())
from juju import jasyncio from juju.model import Model async def main(): model = Model() print('Connecting to model') # Connect to current model with current user, per Juju CLI await model.connect() try: print('Deploying bundle') applications = await model.deploy( './examples/k8s-local-bundle/bundle.yaml', ) print('Waiting for active') await model.wait_for_idle(status='active') print("Successfully deployed!") local_path = './examples/charms/onos.charm' print('Upgrading charm with %s' % local_path) await applications[0].upgrade_charm(path=local_path) await model.wait_for_idle(status='active') print('Removing bundle') for application in applications: await application.remove() finally: print('Disconnecting from model') await model.disconnect() print("Success") if __name__ == '__main__': jasyncio.run(main())
Make the example more informative
Make the example more informative
Python
apache-2.0
juju/python-libjuju,juju/python-libjuju
python
## Code Before: from juju import jasyncio from juju.model import Model async def main(): model = Model() print('Connecting to model') # Connect to current model with current user, per Juju CLI await model.connect() try: print('Deploying bundle') applications = await model.deploy( './examples/k8s-local-bundle/bundle.yaml', ) print('Waiting for active') await model.wait_for_idle(status='active') print("Successfully deployed!") await applications[0].upgrade_charm(path='./examples/charms/onos.charm') await model.wait_for_idle(status='active') print('Removing bundle') for application in applications: await application.remove() finally: print('Disconnecting from model') await model.disconnect() print("Success") if __name__ == '__main__': jasyncio.run(main()) ## Instruction: Make the example more informative ## Code After: from juju import jasyncio from juju.model import Model async def main(): model = Model() print('Connecting to model') # Connect to current model with current user, per Juju CLI await model.connect() try: print('Deploying bundle') applications = await model.deploy( './examples/k8s-local-bundle/bundle.yaml', ) print('Waiting for active') await model.wait_for_idle(status='active') print("Successfully deployed!") local_path = './examples/charms/onos.charm' print('Upgrading charm with %s' % local_path) await applications[0].upgrade_charm(path=local_path) await model.wait_for_idle(status='active') print('Removing bundle') for application in applications: await application.remove() finally: print('Disconnecting from model') await model.disconnect() print("Success") if __name__ == '__main__': jasyncio.run(main())
// ... existing code ... await model.wait_for_idle(status='active') print("Successfully deployed!") local_path = './examples/charms/onos.charm' print('Upgrading charm with %s' % local_path) await applications[0].upgrade_charm(path=local_path) await model.wait_for_idle(status='active') // ... rest of the code ...
25b40a2998677f001e634e1cf86193e7d1d4181e
app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt
app/src/main/java/net/ericschrag/takotlin/view/RecipeView.kt
package net.ericschrag.takotlin.view import android.app.Activity import android.view.View import android.widget.TextView import fr.castorflex.android.smoothprogressbar.SmoothProgressBar import net.ericschrag.takotlin.R import org.jetbrains.anko.* import org.jetbrains.anko.appcompat.v7.toolbar class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { verticalLayout { toolbar { id = R.id.toolbar backgroundResource = R.color.colorPrimary lparams(width = matchParent, height = wrapContent) } loadingIndicator = smoothProgressBar { lparams(width = matchParent, height = wrapContent) isIndeterminate = true // The below are a workaround for the fact that setting indeterminate to true // auto-starts the progress bar, which is not what is wanted visibility = View.INVISIBLE progressiveStop() } verticalLayout { padding = dip(16) recipeTitle = textView { setTextAppearance(R.style.recipe_name) } } } } } fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE loadingIndicator?.progressiveStart() } else { loadingIndicator?.progressiveStop() } } fun showTitle(title : String) { recipeTitle?.setText(title) } }
package net.ericschrag.takotlin.view import android.app.Activity import android.view.View import android.widget.TextView import fr.castorflex.android.smoothprogressbar.SmoothProgressBar import net.ericschrag.takotlin.R import org.jetbrains.anko.* import org.jetbrains.anko.appcompat.v7.toolbar class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null var indicatorRunning: Boolean = false override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { verticalLayout { toolbar { id = R.id.toolbar backgroundResource = R.color.colorPrimary lparams(width = matchParent, height = wrapContent) } loadingIndicator = smoothProgressBar { lparams(width = matchParent, height = wrapContent) isIndeterminate = true // The below are a workaround for the fact that setting indeterminate to true // auto-starts the progress bar, which is not what is wanted visibility = View.INVISIBLE progressiveStop() } verticalLayout { padding = dip(16) recipeTitle = textView { setTextAppearance(R.style.recipe_name) } } } } } fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE if (!indicatorRunning) { indicatorRunning = true loadingIndicator?.progressiveStart() } } else { if (indicatorRunning) { loadingIndicator?.progressiveStop() indicatorRunning = false } } } fun showTitle(title: String) { recipeTitle?.setText(title) } }
Make random loading indicator respect multiple clicks
Make random loading indicator respect multiple clicks
Kotlin
mit
Kusand/TaKotlin,Kusand/TaKotlin
kotlin
## Code Before: package net.ericschrag.takotlin.view import android.app.Activity import android.view.View import android.widget.TextView import fr.castorflex.android.smoothprogressbar.SmoothProgressBar import net.ericschrag.takotlin.R import org.jetbrains.anko.* import org.jetbrains.anko.appcompat.v7.toolbar class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { verticalLayout { toolbar { id = R.id.toolbar backgroundResource = R.color.colorPrimary lparams(width = matchParent, height = wrapContent) } loadingIndicator = smoothProgressBar { lparams(width = matchParent, height = wrapContent) isIndeterminate = true // The below are a workaround for the fact that setting indeterminate to true // auto-starts the progress bar, which is not what is wanted visibility = View.INVISIBLE progressiveStop() } verticalLayout { padding = dip(16) recipeTitle = textView { setTextAppearance(R.style.recipe_name) } } } } } fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE loadingIndicator?.progressiveStart() } else { loadingIndicator?.progressiveStop() } } fun showTitle(title : String) { recipeTitle?.setText(title) } } ## Instruction: Make random loading indicator respect multiple clicks ## Code After: package net.ericschrag.takotlin.view import android.app.Activity import android.view.View import android.widget.TextView import fr.castorflex.android.smoothprogressbar.SmoothProgressBar import net.ericschrag.takotlin.R import org.jetbrains.anko.* import org.jetbrains.anko.appcompat.v7.toolbar class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null var indicatorRunning: Boolean = false override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { verticalLayout { toolbar { id = R.id.toolbar backgroundResource = R.color.colorPrimary lparams(width = matchParent, height = wrapContent) } loadingIndicator = smoothProgressBar { lparams(width = matchParent, height = wrapContent) isIndeterminate = true // The below are a workaround for the fact that setting indeterminate to true // auto-starts the progress bar, which is not what is wanted visibility = View.INVISIBLE progressiveStop() } verticalLayout { padding = dip(16) recipeTitle = textView { setTextAppearance(R.style.recipe_name) } } } } } fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE if (!indicatorRunning) { indicatorRunning = true loadingIndicator?.progressiveStart() } } else { if (indicatorRunning) { loadingIndicator?.progressiveStop() indicatorRunning = false } } } fun showTitle(title: String) { recipeTitle?.setText(title) } }
... class RecipeView : AnkoComponent<Activity> { var loadingIndicator: SmoothProgressBar? = null var recipeTitle: TextView? = null var indicatorRunning: Boolean = false override fun createView(ui: AnkoContext<Activity>): View { return with(ui) { ... fun showLoading(show: Boolean) { if (show) { loadingIndicator?.visibility = View.VISIBLE if (!indicatorRunning) { indicatorRunning = true loadingIndicator?.progressiveStart() } } else { if (indicatorRunning) { loadingIndicator?.progressiveStop() indicatorRunning = false } } } fun showTitle(title: String) { recipeTitle?.setText(title) } ...
15a792e38152e9c7aa6a10bbc251e9b5f0df1341
aurora/optim/sgd.py
aurora/optim/sgd.py
import numpy as np from .base import Base class SGD(Base): def __init__(self, cost, params, lr=0.1, momentum=0.9): super().__init__(cost, params, lr) self.momentum = momentum self.velocity = self._init_velocity_vec(params) def step(self, feed_dict): exe_output = self.executor.run(feed_dict) for i in range(len(self.params)): self.velocity[i] = self.momentum * self.velocity[i] - self.lr * exe_output[1 + i] self.params[i].const += self.velocity[i] return exe_output[0] @staticmethod def _init_velocity_vec(params): vector = [] for param in params: vector.append(np.zeros_like(param.const)) return vector
import numpy as np from .base import Base class SGD(Base): def __init__(self, cost, params, lr=0.1, momentum=0.9): super().__init__(cost, params, lr) self.momentum = momentum self.velocity = [np.zeros_like(param.const)for param in params] def step(self, feed_dict): exe_output = self.executor.run(feed_dict) for i in range(len(self.params)): self.velocity[i] = self.momentum * self.velocity[i] - self.lr * exe_output[1 + i] self.params[i].const += self.velocity[i] return exe_output[0]
Improve velocity list initialisation in SGD
Improve velocity list initialisation in SGD
Python
apache-2.0
upul/Aurora,upul/Aurora,upul/Aurora
python
## Code Before: import numpy as np from .base import Base class SGD(Base): def __init__(self, cost, params, lr=0.1, momentum=0.9): super().__init__(cost, params, lr) self.momentum = momentum self.velocity = self._init_velocity_vec(params) def step(self, feed_dict): exe_output = self.executor.run(feed_dict) for i in range(len(self.params)): self.velocity[i] = self.momentum * self.velocity[i] - self.lr * exe_output[1 + i] self.params[i].const += self.velocity[i] return exe_output[0] @staticmethod def _init_velocity_vec(params): vector = [] for param in params: vector.append(np.zeros_like(param.const)) return vector ## Instruction: Improve velocity list initialisation in SGD ## Code After: import numpy as np from .base import Base class SGD(Base): def __init__(self, cost, params, lr=0.1, momentum=0.9): super().__init__(cost, params, lr) self.momentum = momentum self.velocity = [np.zeros_like(param.const)for param in params] def step(self, feed_dict): exe_output = self.executor.run(feed_dict) for i in range(len(self.params)): self.velocity[i] = self.momentum * self.velocity[i] - self.lr * exe_output[1 + i] self.params[i].const += self.velocity[i] return exe_output[0]
// ... existing code ... def __init__(self, cost, params, lr=0.1, momentum=0.9): super().__init__(cost, params, lr) self.momentum = momentum self.velocity = [np.zeros_like(param.const)for param in params] def step(self, feed_dict): exe_output = self.executor.run(feed_dict) // ... modified code ... self.params[i].const += self.velocity[i] return exe_output[0] // ... rest of the code ...
e77118ccf3ada7623a16cc4a1634215ae5a18a85
plugins/filePrediction/src/com/intellij/filePrediction/history/FilePredictionHistory.kt
plugins/filePrediction/src/com/intellij/filePrediction/history/FilePredictionHistory.kt
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.filePrediction.history import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.ServiceManager import com.intellij.openapi.project.Project import com.intellij.openapi.project.ProjectManager import com.intellij.openapi.project.ProjectManagerListener class FilePredictionHistory(val project: Project) { companion object { private const val RECENT_FILES_LIMIT = 50 fun getInstance(project: Project): FilePredictionHistory { return ServiceManager.getService(project, FilePredictionHistory::class.java) } } private var manager: FileHistoryManager init { manager = FileHistoryManager(FileHistoryPersistence.loadFileHistory(project), RECENT_FILES_LIMIT) project.messageBus.connect().subscribe(ProjectManager.TOPIC, object : ProjectManagerListener { override fun projectClosing(project: Project) { ApplicationManager.getApplication().executeOnPooledThread { FileHistoryPersistence.saveFileHistory(project, manager.getState()) } } }) } fun onFileOpened(fileUrl: String) = manager.onFileOpened(fileUrl) fun calcHistoryFeatures(fileUrl: String): FileHistoryFeatures = manager.calcHistoryFeatures(fileUrl) fun size(): Int = manager.size() fun cleanup() = manager.cleanup() }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.filePrediction.history import com.intellij.openapi.Disposable import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.ServiceManager import com.intellij.openapi.project.Project import com.intellij.openapi.project.ProjectManager import com.intellij.openapi.project.ProjectManagerListener class FilePredictionHistory(val project: Project) : Disposable { companion object { private const val RECENT_FILES_LIMIT = 50 fun getInstance(project: Project): FilePredictionHistory { return ServiceManager.getService(project, FilePredictionHistory::class.java) } } private var manager: FileHistoryManager init { manager = FileHistoryManager(FileHistoryPersistence.loadFileHistory(project), RECENT_FILES_LIMIT) project.messageBus.connect(this).subscribe(ProjectManager.TOPIC, object : ProjectManagerListener { override fun projectClosing(project: Project) { ApplicationManager.getApplication().executeOnPooledThread { FileHistoryPersistence.saveFileHistory(project, manager.getState()) } } }) } fun onFileOpened(fileUrl: String) = manager.onFileOpened(fileUrl) fun calcHistoryFeatures(fileUrl: String): FileHistoryFeatures = manager.calcHistoryFeatures(fileUrl) fun size(): Int = manager.size() fun cleanup() = manager.cleanup() override fun dispose() { } }
Use correct disposable for message bus subscription (fixes NextFilePrediction case of IDEA-232470)
Use correct disposable for message bus subscription (fixes NextFilePrediction case of IDEA-232470) GitOrigin-RevId: 361e874fefac47bc9f213036f7743e6515bc30a1
Kotlin
apache-2.0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
kotlin
## Code Before: // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.filePrediction.history import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.ServiceManager import com.intellij.openapi.project.Project import com.intellij.openapi.project.ProjectManager import com.intellij.openapi.project.ProjectManagerListener class FilePredictionHistory(val project: Project) { companion object { private const val RECENT_FILES_LIMIT = 50 fun getInstance(project: Project): FilePredictionHistory { return ServiceManager.getService(project, FilePredictionHistory::class.java) } } private var manager: FileHistoryManager init { manager = FileHistoryManager(FileHistoryPersistence.loadFileHistory(project), RECENT_FILES_LIMIT) project.messageBus.connect().subscribe(ProjectManager.TOPIC, object : ProjectManagerListener { override fun projectClosing(project: Project) { ApplicationManager.getApplication().executeOnPooledThread { FileHistoryPersistence.saveFileHistory(project, manager.getState()) } } }) } fun onFileOpened(fileUrl: String) = manager.onFileOpened(fileUrl) fun calcHistoryFeatures(fileUrl: String): FileHistoryFeatures = manager.calcHistoryFeatures(fileUrl) fun size(): Int = manager.size() fun cleanup() = manager.cleanup() } ## Instruction: Use correct disposable for message bus subscription (fixes NextFilePrediction case of IDEA-232470) GitOrigin-RevId: 361e874fefac47bc9f213036f7743e6515bc30a1 ## Code After: // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.filePrediction.history import com.intellij.openapi.Disposable import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.ServiceManager import com.intellij.openapi.project.Project import com.intellij.openapi.project.ProjectManager import com.intellij.openapi.project.ProjectManagerListener class FilePredictionHistory(val project: Project) : Disposable { companion object { private const val RECENT_FILES_LIMIT = 50 fun getInstance(project: Project): FilePredictionHistory { return ServiceManager.getService(project, FilePredictionHistory::class.java) } } private var manager: FileHistoryManager init { manager = FileHistoryManager(FileHistoryPersistence.loadFileHistory(project), RECENT_FILES_LIMIT) project.messageBus.connect(this).subscribe(ProjectManager.TOPIC, object : ProjectManagerListener { override fun projectClosing(project: Project) { ApplicationManager.getApplication().executeOnPooledThread { FileHistoryPersistence.saveFileHistory(project, manager.getState()) } } }) } fun onFileOpened(fileUrl: String) = manager.onFileOpened(fileUrl) fun calcHistoryFeatures(fileUrl: String): FileHistoryFeatures = manager.calcHistoryFeatures(fileUrl) fun size(): Int = manager.size() fun cleanup() = manager.cleanup() override fun dispose() { } }
# ... existing code ... // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.filePrediction.history import com.intellij.openapi.Disposable import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.ServiceManager import com.intellij.openapi.project.Project # ... modified code ... import com.intellij.openapi.project.ProjectManager import com.intellij.openapi.project.ProjectManagerListener class FilePredictionHistory(val project: Project) : Disposable { companion object { private const val RECENT_FILES_LIMIT = 50 ... init { manager = FileHistoryManager(FileHistoryPersistence.loadFileHistory(project), RECENT_FILES_LIMIT) project.messageBus.connect(this).subscribe(ProjectManager.TOPIC, object : ProjectManagerListener { override fun projectClosing(project: Project) { ApplicationManager.getApplication().executeOnPooledThread { FileHistoryPersistence.saveFileHistory(project, manager.getState()) ... fun size(): Int = manager.size() fun cleanup() = manager.cleanup() override fun dispose() { } } # ... rest of the code ...
fe1d8b2172aecf4f2f7cebe3c61eeb778f3db23a
src/cms/apps/historylinks/middleware.py
src/cms/apps/historylinks/middleware.py
"""Middleware used by the history links service.""" from django.shortcuts import redirect from cms.apps.historylinks.models import HistoryLink class HistoryLinkFallbackMiddleware(object): """Middleware that attempts to rescue 404 responses with a redirect to it's new location.""" def process_response(self, request, response): """Attempts to rescue 404 responses.""" if response.status_code == 404: # Try to rescue the response. try: link = HistoryLink.objects.get(path=request.path) path = link.object.get_absolute_url() if path != request.path: return redirect(link.object, permanent=True) return response except HistoryLink.DoesNotExist: pass return response
"""Middleware used by the history links service.""" from django.shortcuts import redirect from cms.apps.historylinks.models import HistoryLink class HistoryLinkFallbackMiddleware(object): """Middleware that attempts to rescue 404 responses with a redirect to it's new location.""" def process_response(self, request, response): """Attempts to rescue 404 responses.""" if response.status_code == 404: # Try to rescue the response. try: link = HistoryLink.objects.get(path=request.path) obj = link.object if obj: path = obj.get_absolute_url() if path != request.path: return redirect(link.object, permanent=True) return response except HistoryLink.DoesNotExist: pass return response
Fix for historylinks connecting to missing objects
Fix for historylinks connecting to missing objects
Python
bsd-3-clause
etianen/cms,etianen/cms,danielsamuels/cms,danielsamuels/cms,danielsamuels/cms,dan-gamble/cms,etianen/cms,lewiscollard/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,jamesfoley/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms
python
## Code Before: """Middleware used by the history links service.""" from django.shortcuts import redirect from cms.apps.historylinks.models import HistoryLink class HistoryLinkFallbackMiddleware(object): """Middleware that attempts to rescue 404 responses with a redirect to it's new location.""" def process_response(self, request, response): """Attempts to rescue 404 responses.""" if response.status_code == 404: # Try to rescue the response. try: link = HistoryLink.objects.get(path=request.path) path = link.object.get_absolute_url() if path != request.path: return redirect(link.object, permanent=True) return response except HistoryLink.DoesNotExist: pass return response ## Instruction: Fix for historylinks connecting to missing objects ## Code After: """Middleware used by the history links service.""" from django.shortcuts import redirect from cms.apps.historylinks.models import HistoryLink class HistoryLinkFallbackMiddleware(object): """Middleware that attempts to rescue 404 responses with a redirect to it's new location.""" def process_response(self, request, response): """Attempts to rescue 404 responses.""" if response.status_code == 404: # Try to rescue the response. try: link = HistoryLink.objects.get(path=request.path) obj = link.object if obj: path = obj.get_absolute_url() if path != request.path: return redirect(link.object, permanent=True) return response except HistoryLink.DoesNotExist: pass return response
# ... existing code ... # Try to rescue the response. try: link = HistoryLink.objects.get(path=request.path) obj = link.object if obj: path = obj.get_absolute_url() if path != request.path: return redirect(link.object, permanent=True) return response except HistoryLink.DoesNotExist: pass # ... rest of the code ...
14cdf6b7a82e49f1860aee41e4b1a5b20cf179b2
quickstats/signals.py
quickstats/signals.py
import logging from . import tasks from django.db.models.signals import post_save from django.dispatch import receiver logger = logging.getLogger(__name__) @receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart") def hook_update_data(sender, instance, *args, **kwargs): tasks.update_chart.delay(instance.widget_id) @receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location") def hook_update_data(sender, instance, *args, **kwargs): tasks.update_location.delay(instance.widget_id)
import logging from . import tasks from django.db.models.signals import post_save from django.dispatch import receiver logger = logging.getLogger(__name__) @receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart") def hook_update_chart(sender, instance, *args, **kwargs): tasks.update_chart.delay(instance.widget_id) @receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location") def hook_update_location(sender, instance, *args, **kwargs): tasks.update_location.delay(instance.widget_id)
Make unique names for signal functions
Make unique names for signal functions
Python
mit
kfdm/django-simplestats,kfdm/django-simplestats
python
## Code Before: import logging from . import tasks from django.db.models.signals import post_save from django.dispatch import receiver logger = logging.getLogger(__name__) @receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart") def hook_update_data(sender, instance, *args, **kwargs): tasks.update_chart.delay(instance.widget_id) @receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location") def hook_update_data(sender, instance, *args, **kwargs): tasks.update_location.delay(instance.widget_id) ## Instruction: Make unique names for signal functions ## Code After: import logging from . import tasks from django.db.models.signals import post_save from django.dispatch import receiver logger = logging.getLogger(__name__) @receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart") def hook_update_chart(sender, instance, *args, **kwargs): tasks.update_chart.delay(instance.widget_id) @receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location") def hook_update_location(sender, instance, *args, **kwargs): tasks.update_location.delay(instance.widget_id)
... @receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart") def hook_update_chart(sender, instance, *args, **kwargs): tasks.update_chart.delay(instance.widget_id) @receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location") def hook_update_location(sender, instance, *args, **kwargs): tasks.update_location.delay(instance.widget_id) ...
3e683e4ab0a606bcd4a841acabb9f47c4d2be79c
src/main/java/gsg/infrastructure/Utils.java
src/main/java/gsg/infrastructure/Utils.java
package gsg.infrastructure; import gsg.threads.IJob; import gsg.threads.JobRunner; import gsg.threads.JobRunnerConfiguration; /** * @author [email protected] * Created: 23.07.15 9:37 */ public class Utils { public static String getMessage(String line) { final int i = line.indexOf(" "); if (i > -1 && i < line.length()) { return line.substring(i+1, line.length()); } else { return null; } } public static String getKey(String line) { final int i = line.indexOf(" "); if (i > -1) { return line.substring(0, i); } else { return null; } } public static JobRunner runLoop(IJob job) { final JobRunnerConfiguration configuration = new JobRunnerConfiguration(); final JobRunner runner = new JobRunner(configuration, job); runner.start(); return runner; } public static void runLoopAndJoin(IJob job) { final JobRunner runner = runLoop(job); try { runner.join(); } catch (InterruptedException e) { e.printStackTrace(); } } }
package gsg.infrastructure; import gsg.threads.IJob; import gsg.threads.JobRunner; import gsg.threads.JobRunnerConfiguration; /** * @author [email protected] * Created: 23.07.15 9:37 */ public class Utils { public static String getMessage(String line) { final int i = line.indexOf(" "); if (i > -1 && i < line.length()) { return line.substring(i+1, line.length()); } else { return null; } } public static String getKey(String line) { final int i = line.indexOf(" "); if (i > -1) { return line.substring(0, i); } else { return null; } } public static JobRunner runLoop(IJob job) { final JobRunnerConfiguration configuration = new JobRunnerConfiguration(); configuration.setDoLog(false); final JobRunner runner = new JobRunner(configuration, job); runner.start(); return runner; } public static void runLoopAndJoin(IJob job) { final JobRunner runner = runLoop(job); try { runner.join(); } catch (InterruptedException e) { e.printStackTrace(); } } }
Set default configuration value of JobRunner.doLog to false
Set default configuration value of JobRunner.doLog to false
Java
apache-2.0
cor-serpentis/server
java
## Code Before: package gsg.infrastructure; import gsg.threads.IJob; import gsg.threads.JobRunner; import gsg.threads.JobRunnerConfiguration; /** * @author [email protected] * Created: 23.07.15 9:37 */ public class Utils { public static String getMessage(String line) { final int i = line.indexOf(" "); if (i > -1 && i < line.length()) { return line.substring(i+1, line.length()); } else { return null; } } public static String getKey(String line) { final int i = line.indexOf(" "); if (i > -1) { return line.substring(0, i); } else { return null; } } public static JobRunner runLoop(IJob job) { final JobRunnerConfiguration configuration = new JobRunnerConfiguration(); final JobRunner runner = new JobRunner(configuration, job); runner.start(); return runner; } public static void runLoopAndJoin(IJob job) { final JobRunner runner = runLoop(job); try { runner.join(); } catch (InterruptedException e) { e.printStackTrace(); } } } ## Instruction: Set default configuration value of JobRunner.doLog to false ## Code After: package gsg.infrastructure; import gsg.threads.IJob; import gsg.threads.JobRunner; import gsg.threads.JobRunnerConfiguration; /** * @author [email protected] * Created: 23.07.15 9:37 */ public class Utils { public static String getMessage(String line) { final int i = line.indexOf(" "); if (i > -1 && i < line.length()) { return line.substring(i+1, line.length()); } else { return null; } } public static String getKey(String line) { final int i = line.indexOf(" "); if (i > -1) { return line.substring(0, i); } else { return null; } } public static JobRunner runLoop(IJob job) { final JobRunnerConfiguration configuration = new JobRunnerConfiguration(); configuration.setDoLog(false); final JobRunner runner = new JobRunner(configuration, job); runner.start(); return runner; } public static void runLoopAndJoin(IJob job) { final JobRunner runner = runLoop(job); try { runner.join(); } catch (InterruptedException e) { e.printStackTrace(); } } }
# ... existing code ... public static JobRunner runLoop(IJob job) { final JobRunnerConfiguration configuration = new JobRunnerConfiguration(); configuration.setDoLog(false); final JobRunner runner = new JobRunner(configuration, job); runner.start(); return runner; # ... rest of the code ...
54c7d5704a148b703fb0db74eeeb66c187134faf
gratipay/utils/pricing.py
gratipay/utils/pricing.py
from decimal import Decimal as D, ROUND_HALF_EVEN def suggested_payment(usage): if usage >= 500: percentage = D('0.02') elif usage >= 20: percentage = D('0.05') else: percentage = D('0.10') suggestion = usage * percentage if suggestion == 0: rounded = suggestion elif suggestion < 0.25: rounded = D('0.25') elif suggestion < 0.50: rounded = D('0.50') elif suggestion < 1: rounded = D('1.00') else: rounded = suggestion.quantize(D('0'), ROUND_HALF_EVEN) return rounded def suggested_payment_low_high(usage): # Above $500/wk we suggest 2%. if usage >= 5000: low = D('100.00') high = D('1000.00') elif usage >= 500: low = D('10.00') high = D('100.00') # From $20 to $499 we suggest 5%. elif usage >= 100: low = D('5.00') high = D('25.00') elif usage >= 20: low = D('1.00') high = D('5.00') # Below $20 we suggest 10%. elif usage >= 5: low = D('0.50') high = D('2.00') else: low = D('0.10') high = D('1.00') return low, high
from decimal import Decimal as D, ROUND_HALF_EVEN def suggested_payment(usage): percentage = D('0.05') suggestion = usage * percentage rounded = suggestion.quantize(D('0'), ROUND_HALF_EVEN) return rounded def suggested_payment_low_high(usage): # Above $500/wk we suggest 2%. if usage >= 5000: low = D('100.00') high = D('1000.00') elif usage >= 500: low = D('10.00') high = D('100.00') # From $20 to $499 we suggest 5%. elif usage >= 100: low = D('5.00') high = D('25.00') elif usage >= 20: low = D('1.00') high = D('5.00') # Below $20 we suggest 10%. elif usage >= 5: low = D('0.50') high = D('2.00') else: low = D('0.10') high = D('1.00') return low, high
Change suggested payment to flat 5%
Change suggested payment to flat 5%
Python
mit
eXcomm/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com
python
## Code Before: from decimal import Decimal as D, ROUND_HALF_EVEN def suggested_payment(usage): if usage >= 500: percentage = D('0.02') elif usage >= 20: percentage = D('0.05') else: percentage = D('0.10') suggestion = usage * percentage if suggestion == 0: rounded = suggestion elif suggestion < 0.25: rounded = D('0.25') elif suggestion < 0.50: rounded = D('0.50') elif suggestion < 1: rounded = D('1.00') else: rounded = suggestion.quantize(D('0'), ROUND_HALF_EVEN) return rounded def suggested_payment_low_high(usage): # Above $500/wk we suggest 2%. if usage >= 5000: low = D('100.00') high = D('1000.00') elif usage >= 500: low = D('10.00') high = D('100.00') # From $20 to $499 we suggest 5%. elif usage >= 100: low = D('5.00') high = D('25.00') elif usage >= 20: low = D('1.00') high = D('5.00') # Below $20 we suggest 10%. elif usage >= 5: low = D('0.50') high = D('2.00') else: low = D('0.10') high = D('1.00') return low, high ## Instruction: Change suggested payment to flat 5% ## Code After: from decimal import Decimal as D, ROUND_HALF_EVEN def suggested_payment(usage): percentage = D('0.05') suggestion = usage * percentage rounded = suggestion.quantize(D('0'), ROUND_HALF_EVEN) return rounded def suggested_payment_low_high(usage): # Above $500/wk we suggest 2%. if usage >= 5000: low = D('100.00') high = D('1000.00') elif usage >= 500: low = D('10.00') high = D('100.00') # From $20 to $499 we suggest 5%. elif usage >= 100: low = D('5.00') high = D('25.00') elif usage >= 20: low = D('1.00') high = D('5.00') # Below $20 we suggest 10%. elif usage >= 5: low = D('0.50') high = D('2.00') else: low = D('0.10') high = D('1.00') return low, high
... def suggested_payment(usage): percentage = D('0.05') suggestion = usage * percentage rounded = suggestion.quantize(D('0'), ROUND_HALF_EVEN) return rounded ...
56016d4d623ef9b85eee0114d3465b00fded1624
src/main/java/com/google/sps/servlets/Logout.java
src/main/java/com/google/sps/servlets/Logout.java
package com.google.sps.servlets; import java.net.*; import com.google.cloud.language.v1.Document; import java.io.PrintWriter; import com.google.appengine.api.datastore.*; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.cloud.language.v1.LanguageServiceClient; import com.google.cloud.language.v1.Sentiment; import java.io.IOException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.google.firebase.auth.FirebaseToken; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseAuthException; import com.google.firebase.auth.SessionCookieOptions; import java.util.concurrent.TimeUnit; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.Cookie; import com.google.sps.firebase.FirebaseAppManager;
package com.google.sps.servlets; import java.io.IOException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @WebServlet("/sign-out") public class Logout extends HttpServlet { @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { final int maxAge = 0; Cookie newCookie = new Cookie("session", " "); newCookie.setMaxAge(maxAge); // Set the expiration date to immediate deletion response.addCookie(newCookie); response.sendRedirect("/index.html"); // Go back to the main page } }
Make current cookie expire for logging out.
Make current cookie expire for logging out.
Java
apache-2.0
googleinterns/step87-2020,googleinterns/step87-2020,googleinterns/step87-2020,googleinterns/step87-2020
java
## Code Before: package com.google.sps.servlets; import java.net.*; import com.google.cloud.language.v1.Document; import java.io.PrintWriter; import com.google.appengine.api.datastore.*; import com.google.appengine.api.users.UserService; import com.google.appengine.api.users.UserServiceFactory; import com.google.cloud.language.v1.LanguageServiceClient; import com.google.cloud.language.v1.Sentiment; import java.io.IOException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.google.firebase.auth.FirebaseToken; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseAuthException; import com.google.firebase.auth.SessionCookieOptions; import java.util.concurrent.TimeUnit; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.Cookie; import com.google.sps.firebase.FirebaseAppManager; ## Instruction: Make current cookie expire for logging out. ## Code After: package com.google.sps.servlets; import java.io.IOException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @WebServlet("/sign-out") public class Logout extends HttpServlet { @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { final int maxAge = 0; Cookie newCookie = new Cookie("session", " "); newCookie.setMaxAge(maxAge); // Set the expiration date to immediate deletion response.addCookie(newCookie); response.sendRedirect("/index.html"); // Go back to the main page } }
// ... existing code ... package com.google.sps.servlets; import java.io.IOException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @WebServlet("/sign-out") public class Logout extends HttpServlet { @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { final int maxAge = 0; Cookie newCookie = new Cookie("session", " "); newCookie.setMaxAge(maxAge); // Set the expiration date to immediate deletion response.addCookie(newCookie); response.sendRedirect("/index.html"); // Go back to the main page } } // ... rest of the code ...
0ce1a01b7969f9e20febdd0da39a69504570ce8c
src/tools/gen/template/elektra_gen.c
src/tools/gen/template/elektra_gen.c
/** * @file * * @brief * * @copyright BSD License (see doc/LICENSE.md or https://www.libelektra.org) */ #compiler-settings directiveStartToken = @ cheetahVarStartToken = $ #end compiler-settings @from support.elektra_gen import * @set support = ElektraGenSupport() @for $key, $info in $parameters.iteritems() @if $support.type_of($info) == "enum" ELEKTRA_DEFINITIONS ($support.enum_type($key), $support.enum_type_name($key), "enum", KDB_LONG_TO_STRING, KDB_STRING_TO_LONG) @end if @end for
/** * @file * * @brief * * @copyright BSD License (see doc/LICENSE.md or https://www.libelektra.org) */ #compiler-settings directiveStartToken = @ cheetahVarStartToken = $ #end compiler-settings @from support.elektra_gen import * @set support = ElektraGenSupport() #include <stdlib.h> #include <elektra.h> #include <kdbhelper.h> #include "elektra_gen.h" KDBType KDB_TYPE_ENUM = "enum"; #define KDB_ENUM_TO_STRING(value) elektraFormat (ELEKTRA_LONG_F, value) #define KDB_STRING_TO_ENUM(string) (kdb_long_t) strtoul (string, NULL, 10) @for $key, $info in $parameters.iteritems() @if $support.type_of($info) == "enum" ELEKTRA_DEFINITIONS ($support.enum_type($key), $support.enum_type_name($key), KDB_TYPE_ENUM, KDB_ENUM_TO_STRING, KDB_STRING_TO_ENUM) @end if @end for #undef KDB_ENUM_TO_STRING #undef KDB_STRING_TO_ENUM
Add defines for enum type and conversions
codegen: Add defines for enum type and conversions
C
bsd-3-clause
ElektraInitiative/libelektra,mpranj/libelektra,mpranj/libelektra,BernhardDenner/libelektra,petermax2/libelektra,BernhardDenner/libelektra,petermax2/libelektra,ElektraInitiative/libelektra,petermax2/libelektra,petermax2/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,petermax2/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,ElektraInitiative/libelektra,ElektraInitiative/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,ElektraInitiative/libelektra,BernhardDenner/libelektra,petermax2/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,mpranj/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,BernhardDenner/libelektra,mpranj/libelektra,mpranj/libelektra,mpranj/libelektra,petermax2/libelektra,mpranj/libelektra,petermax2/libelektra,BernhardDenner/libelektra,petermax2/libelektra
c
## Code Before: /** * @file * * @brief * * @copyright BSD License (see doc/LICENSE.md or https://www.libelektra.org) */ #compiler-settings directiveStartToken = @ cheetahVarStartToken = $ #end compiler-settings @from support.elektra_gen import * @set support = ElektraGenSupport() @for $key, $info in $parameters.iteritems() @if $support.type_of($info) == "enum" ELEKTRA_DEFINITIONS ($support.enum_type($key), $support.enum_type_name($key), "enum", KDB_LONG_TO_STRING, KDB_STRING_TO_LONG) @end if @end for ## Instruction: codegen: Add defines for enum type and conversions ## Code After: /** * @file * * @brief * * @copyright BSD License (see doc/LICENSE.md or https://www.libelektra.org) */ #compiler-settings directiveStartToken = @ cheetahVarStartToken = $ #end compiler-settings @from support.elektra_gen import * @set support = ElektraGenSupport() #include <stdlib.h> #include <elektra.h> #include <kdbhelper.h> #include "elektra_gen.h" KDBType KDB_TYPE_ENUM = "enum"; #define KDB_ENUM_TO_STRING(value) elektraFormat (ELEKTRA_LONG_F, value) #define KDB_STRING_TO_ENUM(string) (kdb_long_t) strtoul (string, NULL, 10) @for $key, $info in $parameters.iteritems() @if $support.type_of($info) == "enum" ELEKTRA_DEFINITIONS ($support.enum_type($key), $support.enum_type_name($key), KDB_TYPE_ENUM, KDB_ENUM_TO_STRING, KDB_STRING_TO_ENUM) @end if @end for #undef KDB_ENUM_TO_STRING #undef KDB_STRING_TO_ENUM
// ... existing code ... @from support.elektra_gen import * @set support = ElektraGenSupport() #include <stdlib.h> #include <elektra.h> #include <kdbhelper.h> #include "elektra_gen.h" KDBType KDB_TYPE_ENUM = "enum"; #define KDB_ENUM_TO_STRING(value) elektraFormat (ELEKTRA_LONG_F, value) #define KDB_STRING_TO_ENUM(string) (kdb_long_t) strtoul (string, NULL, 10) @for $key, $info in $parameters.iteritems() @if $support.type_of($info) == "enum" ELEKTRA_DEFINITIONS ($support.enum_type($key), $support.enum_type_name($key), KDB_TYPE_ENUM, KDB_ENUM_TO_STRING, KDB_STRING_TO_ENUM) @end if @end for #undef KDB_ENUM_TO_STRING #undef KDB_STRING_TO_ENUM // ... rest of the code ...