commit
stringlengths
40
40
old_file
stringlengths
4
234
new_file
stringlengths
4
234
old_contents
stringlengths
10
3.01k
new_contents
stringlengths
19
3.38k
subject
stringlengths
16
736
message
stringlengths
17
2.63k
lang
stringclasses
4 values
license
stringclasses
13 values
repos
stringlengths
5
82.6k
config
stringclasses
4 values
content
stringlengths
134
4.41k
fuzzy_diff
stringlengths
29
3.44k
17c191e8231e6429309bd6cc8e826eb38b9d8d89
include/bubblesorts-bubble-inl.h
include/bubblesorts-bubble-inl.h
namespace sorting { namespace bubble { template <class T> void BubbleSort(T * const array, const int N) /** * Bubble sort: Bubble sort * Scaling: * Best case: * Worst case: * Useful: * */ { } } // namespace bubble } // namespace sorting #endif // SORTING_INCLUDE_BUBBLESORTS_BUBBLE_INL_H_
namespace sorting { namespace bubble { template <class T> void BubbleSort(T * const array, const int N) /** * Bubble sort: Bubble sort * Scaling: * Best case: * Worst case: * Useful: * */ { int pass_count = 0; // Number of pass over the array. int swap_count = 0; // Number of swap for a single pass. // Pass over the array while the swap count is non-null. do { // It's a new pass; reset the count of swap. swap_count = 0; // Iterate over the array, skipping the last item for (int i = 0 ; i < N-1 ; i++) { // Swap elements if next one is "smaller" and register the swap. if (array[i] > array[i+1]) { std::swap(array[i], array[i+1]); swap_count++; } } pass_count++; } while (swap_count != 0); } } // namespace bubble } // namespace sorting #endif // SORTING_INCLUDE_BUBBLESORTS_BUBBLE_INL_H_
Add working bubble sort implementation
Add working bubble sort implementation
C
bsd-3-clause
nbigaouette/sorting,nbigaouette/sorting,nbigaouette/sorting,nbigaouette/sorting
c
## Code Before: namespace sorting { namespace bubble { template <class T> void BubbleSort(T * const array, const int N) /** * Bubble sort: Bubble sort * Scaling: * Best case: * Worst case: * Useful: * */ { } } // namespace bubble } // namespace sorting #endif // SORTING_INCLUDE_BUBBLESORTS_BUBBLE_INL_H_ ## Instruction: Add working bubble sort implementation ## Code After: namespace sorting { namespace bubble { template <class T> void BubbleSort(T * const array, const int N) /** * Bubble sort: Bubble sort * Scaling: * Best case: * Worst case: * Useful: * */ { int pass_count = 0; // Number of pass over the array. int swap_count = 0; // Number of swap for a single pass. // Pass over the array while the swap count is non-null. do { // It's a new pass; reset the count of swap. swap_count = 0; // Iterate over the array, skipping the last item for (int i = 0 ; i < N-1 ; i++) { // Swap elements if next one is "smaller" and register the swap. if (array[i] > array[i+1]) { std::swap(array[i], array[i+1]); swap_count++; } } pass_count++; } while (swap_count != 0); } } // namespace bubble } // namespace sorting #endif // SORTING_INCLUDE_BUBBLESORTS_BUBBLE_INL_H_
// ... existing code ... * */ { int pass_count = 0; // Number of pass over the array. int swap_count = 0; // Number of swap for a single pass. // Pass over the array while the swap count is non-null. do { // It's a new pass; reset the count of swap. swap_count = 0; // Iterate over the array, skipping the last item for (int i = 0 ; i < N-1 ; i++) { // Swap elements if next one is "smaller" and register the swap. if (array[i] > array[i+1]) { std::swap(array[i], array[i+1]); swap_count++; } } pass_count++; } while (swap_count != 0); } } // namespace bubble // ... rest of the code ...
63c1d7db06bb3bd12f0d4eeae36e188bb006c440
src/edu/northwestern/bioinformatics/studycalendar/web/template/SelectArmController.java
src/edu/northwestern/bioinformatics/studycalendar/web/template/SelectArmController.java
package edu.northwestern.bioinformatics.studycalendar.web.template; import edu.northwestern.bioinformatics.studycalendar.web.ReturnSingleObjectController; import edu.northwestern.bioinformatics.studycalendar.domain.Arm; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.AccessControl; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.StudyCalendarProtectionGroup; /** * @author Rhett Sutphin */ @AccessControl(protectionGroups = StudyCalendarProtectionGroup.STUDY_COORDINATOR) public class SelectArmController extends ReturnSingleObjectController<Arm> { public SelectArmController() { setParameterName("arm"); setViewName("template/ajax/selectArm"); } @Override protected Object wrapObject(Arm loaded) { return new ArmTemplate(loaded); } }
package edu.northwestern.bioinformatics.studycalendar.web.template; import edu.northwestern.bioinformatics.studycalendar.web.ReturnSingleObjectController; import edu.northwestern.bioinformatics.studycalendar.domain.Arm; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.AccessControl; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.StudyCalendarProtectionGroup; /** * @author Rhett Sutphin */ @AccessControl(protectionGroups = StudyCalendarProtectionGroup.BASE) public class SelectArmController extends ReturnSingleObjectController<Arm> { public SelectArmController() { setParameterName("arm"); setViewName("template/ajax/selectArm"); } @Override protected Object wrapObject(Arm loaded) { return new ArmTemplate(loaded); } }
Use base because everyone can use the template view
Use base because everyone can use the template view git-svn-id: 4b387fe5ada7764508e2ca96c335714e4c1692c6@750 0d517254-b314-0410-acde-c619094fa49f
Java
bsd-3-clause
NUBIC/psc-mirror,NUBIC/psc-mirror,NUBIC/psc-mirror,NUBIC/psc-mirror
java
## Code Before: package edu.northwestern.bioinformatics.studycalendar.web.template; import edu.northwestern.bioinformatics.studycalendar.web.ReturnSingleObjectController; import edu.northwestern.bioinformatics.studycalendar.domain.Arm; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.AccessControl; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.StudyCalendarProtectionGroup; /** * @author Rhett Sutphin */ @AccessControl(protectionGroups = StudyCalendarProtectionGroup.STUDY_COORDINATOR) public class SelectArmController extends ReturnSingleObjectController<Arm> { public SelectArmController() { setParameterName("arm"); setViewName("template/ajax/selectArm"); } @Override protected Object wrapObject(Arm loaded) { return new ArmTemplate(loaded); } } ## Instruction: Use base because everyone can use the template view git-svn-id: 4b387fe5ada7764508e2ca96c335714e4c1692c6@750 0d517254-b314-0410-acde-c619094fa49f ## Code After: package edu.northwestern.bioinformatics.studycalendar.web.template; import edu.northwestern.bioinformatics.studycalendar.web.ReturnSingleObjectController; import edu.northwestern.bioinformatics.studycalendar.domain.Arm; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.AccessControl; import edu.northwestern.bioinformatics.studycalendar.utils.accesscontrol.StudyCalendarProtectionGroup; /** * @author Rhett Sutphin */ @AccessControl(protectionGroups = StudyCalendarProtectionGroup.BASE) public class SelectArmController extends ReturnSingleObjectController<Arm> { public SelectArmController() { setParameterName("arm"); setViewName("template/ajax/selectArm"); } @Override protected Object wrapObject(Arm loaded) { return new ArmTemplate(loaded); } }
// ... existing code ... /** * @author Rhett Sutphin */ @AccessControl(protectionGroups = StudyCalendarProtectionGroup.BASE) public class SelectArmController extends ReturnSingleObjectController<Arm> { public SelectArmController() { setParameterName("arm"); // ... rest of the code ...
315da880c81e02e8c576f51266dffaf19abf8e13
commen5/templatetags/commen5_tags.py
commen5/templatetags/commen5_tags.py
from django import template from django.template.defaulttags import CommentNode register = template.Library() def commen5(parser, token): """ Ignores everything between ``{% commen5 %}`` and ``{% endcommen5 %}``. """ parser.skip_past('endcommen5') return CommentNode() commen5 = register.tag(commen5)
from django import template from django.template.defaulttags import CommentNode register = template.Library() @register.tag def commen5(parser, token): """ Ignores everything between ``{% commen5 %}`` and ``{% endcommen5 %}``. """ parser.skip_past('endcommen5') return CommentNode()
Use the sexy, new decorator style syntax.
Use the sexy, new decorator style syntax.
Python
mit
bradmontgomery/django-commen5
python
## Code Before: from django import template from django.template.defaulttags import CommentNode register = template.Library() def commen5(parser, token): """ Ignores everything between ``{% commen5 %}`` and ``{% endcommen5 %}``. """ parser.skip_past('endcommen5') return CommentNode() commen5 = register.tag(commen5) ## Instruction: Use the sexy, new decorator style syntax. ## Code After: from django import template from django.template.defaulttags import CommentNode register = template.Library() @register.tag def commen5(parser, token): """ Ignores everything between ``{% commen5 %}`` and ``{% endcommen5 %}``. """ parser.skip_past('endcommen5') return CommentNode()
... register = template.Library() @register.tag def commen5(parser, token): """ Ignores everything between ``{% commen5 %}`` and ``{% endcommen5 %}``. ... """ parser.skip_past('endcommen5') return CommentNode() ...
38d665c82ba3dedc51f597f519dac84546588638
include/shmlog_tags.h
include/shmlog_tags.h
/* * $Id$ * * Define the tags in the shared memory in a reusable format. * Whoever includes this get to define what the SLTM macro does. * */ SLTM(CLI) SLTM(SessionOpen) SLTM(SessionClose) SLTM(ClientAddr) SLTM(Request) SLTM(URL) SLTM(Protocol) SLTM(Headers)
/* * $Id$ * * Define the tags in the shared memory in a reusable format. * Whoever includes this get to define what the SLTM macro does. * */ SLTM(CLI) SLTM(SessionOpen) SLTM(SessionClose) SLTM(ClientAddr) SLTM(Request) SLTM(URL) SLTM(Protocol) SLTM(H_Unknown) #define HTTPH(a, b) SLTM(b) #include "http_headers.h" #undef HTTPH
Use http_headers.h to define HTTP header tags for logging
Use http_headers.h to define HTTP header tags for logging git-svn-id: 2c9807fa3ff65b17195bd55dc8a6c4261e10127b@90 d4fa192b-c00b-0410-8231-f00ffab90ce4
C
bsd-2-clause
varnish/Varnish-Cache,ajasty-cavium/Varnish-Cache,ssm/pkg-varnish,zhoualbeart/Varnish-Cache,franciscovg/Varnish-Cache,ajasty-cavium/Varnish-Cache,ambernetas/varnish-cache,zhoualbeart/Varnish-Cache,gauthier-delacroix/Varnish-Cache,drwilco/varnish-cache-drwilco,drwilco/varnish-cache-drwilco,ssm/pkg-varnish,feld/Varnish-Cache,mrhmouse/Varnish-Cache,ssm/pkg-varnish,wikimedia/operations-debs-varnish,ajasty-cavium/Varnish-Cache,1HLtd/Varnish-Cache,1HLtd/Varnish-Cache,gauthier-delacroix/Varnish-Cache,mrhmouse/Varnish-Cache,chrismoulton/Varnish-Cache,gquintard/Varnish-Cache,gauthier-delacroix/Varnish-Cache,feld/Varnish-Cache,alarky/varnish-cache-doc-ja,alarky/varnish-cache-doc-ja,1HLtd/Varnish-Cache,feld/Varnish-Cache,ajasty-cavium/Varnish-Cache,drwilco/varnish-cache-old,gquintard/Varnish-Cache,franciscovg/Varnish-Cache,alarky/varnish-cache-doc-ja,wikimedia/operations-debs-varnish,wikimedia/operations-debs-varnish,ambernetas/varnish-cache,gauthier-delacroix/Varnish-Cache,chrismoulton/Varnish-Cache,varnish/Varnish-Cache,mrhmouse/Varnish-Cache,franciscovg/Varnish-Cache,wikimedia/operations-debs-varnish,alarky/varnish-cache-doc-ja,drwilco/varnish-cache-drwilco,alarky/varnish-cache-doc-ja,zhoualbeart/Varnish-Cache,franciscovg/Varnish-Cache,drwilco/varnish-cache-old,mrhmouse/Varnish-Cache,zhoualbeart/Varnish-Cache,ajasty-cavium/Varnish-Cache,gquintard/Varnish-Cache,zhoualbeart/Varnish-Cache,ambernetas/varnish-cache,chrismoulton/Varnish-Cache,1HLtd/Varnish-Cache,varnish/Varnish-Cache,feld/Varnish-Cache,gauthier-delacroix/Varnish-Cache,feld/Varnish-Cache,ssm/pkg-varnish,gquintard/Varnish-Cache,varnish/Varnish-Cache,chrismoulton/Varnish-Cache,franciscovg/Varnish-Cache,wikimedia/operations-debs-varnish,ssm/pkg-varnish,varnish/Varnish-Cache,chrismoulton/Varnish-Cache,drwilco/varnish-cache-old,mrhmouse/Varnish-Cache
c
## Code Before: /* * $Id$ * * Define the tags in the shared memory in a reusable format. * Whoever includes this get to define what the SLTM macro does. * */ SLTM(CLI) SLTM(SessionOpen) SLTM(SessionClose) SLTM(ClientAddr) SLTM(Request) SLTM(URL) SLTM(Protocol) SLTM(Headers) ## Instruction: Use http_headers.h to define HTTP header tags for logging git-svn-id: 2c9807fa3ff65b17195bd55dc8a6c4261e10127b@90 d4fa192b-c00b-0410-8231-f00ffab90ce4 ## Code After: /* * $Id$ * * Define the tags in the shared memory in a reusable format. * Whoever includes this get to define what the SLTM macro does. * */ SLTM(CLI) SLTM(SessionOpen) SLTM(SessionClose) SLTM(ClientAddr) SLTM(Request) SLTM(URL) SLTM(Protocol) SLTM(H_Unknown) #define HTTPH(a, b) SLTM(b) #include "http_headers.h" #undef HTTPH
# ... existing code ... SLTM(Request) SLTM(URL) SLTM(Protocol) SLTM(H_Unknown) #define HTTPH(a, b) SLTM(b) #include "http_headers.h" #undef HTTPH # ... rest of the code ...
e99f7b6d25464f36accc2f04899edfa9e982bee2
tests/cpydiff/core_fstring_concat.py
tests/cpydiff/core_fstring_concat.py
x = 1 print("aa" f"{x}") print(f"{x}" "ab") print("a{}a" f"{x}") print(f"{x}" "a{}b")
x, y = 1, 2 print("aa" f"{x}") # works print(f"{x}" "ab") # works print("a{}a" f"{x}") # fails print(f"{x}" "a{}b") # fails print(f"{x}" f"{y}") # fails
Clarify f-string diffs regarding concatenation.
tests/cpydiff: Clarify f-string diffs regarding concatenation. Concatenation of any literals (including f-strings) should be avoided. Signed-off-by: Jim Mussared <[email protected]>
Python
mit
adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython
python
## Code Before: x = 1 print("aa" f"{x}") print(f"{x}" "ab") print("a{}a" f"{x}") print(f"{x}" "a{}b") ## Instruction: tests/cpydiff: Clarify f-string diffs regarding concatenation. Concatenation of any literals (including f-strings) should be avoided. Signed-off-by: Jim Mussared <[email protected]> ## Code After: x, y = 1, 2 print("aa" f"{x}") # works print(f"{x}" "ab") # works print("a{}a" f"{x}") # fails print(f"{x}" "a{}b") # fails print(f"{x}" f"{y}") # fails
... x, y = 1, 2 print("aa" f"{x}") # works print(f"{x}" "ab") # works print("a{}a" f"{x}") # fails print(f"{x}" "a{}b") # fails print(f"{x}" f"{y}") # fails ...
178e567201f564cfe383586c0c002c65092660ff
src/main/java/ee/shy/cli/command/CompletionCommand.java
src/main/java/ee/shy/cli/command/CompletionCommand.java
package ee.shy.cli.command; import ee.shy.cli.Command; import ee.shy.cli.SuperCommand; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; public class CompletionCommand implements Command { private final Command rootCommand; public CompletionCommand(Command rootCommand) { this.rootCommand = rootCommand; } @Override public void execute(String[] args) throws IOException { List<String> argsList = new ArrayList<>(Arrays.asList(args)); argsList.add(null); Command command = rootCommand; for (String arg : argsList) { if (command instanceof SuperCommand) { Map<String, Command> subCommands = ((SuperCommand) command).getSubCommands(); Command subCommand = subCommands.get(arg); if (subCommand != null) { command = subCommand; } else { System.out.println(String.join(" ", subCommands.keySet())); break; } } else if (command instanceof HelpCommand) { command = ((HelpCommand) command).getRootCommand(); } } } @Override public String getHelp() { return null; } }
package ee.shy.cli.command; import ee.shy.cli.Command; import ee.shy.cli.SuperCommand; import java.io.IOException; import java.util.Map; public class CompletionCommand implements Command { private final Command rootCommand; public CompletionCommand(Command rootCommand) { this.rootCommand = rootCommand; } @Override public void execute(String[] args) throws IOException { Command command = rootCommand; for (int i = 0; i <= args.length; i++) { // loop for one extra time for supercommands without arguments if (command instanceof SuperCommand) { Map<String, Command> subCommands = ((SuperCommand) command).getSubCommands(); Command subCommand; if ((i < args.length - 1) // complete subcommand even if fully typed (don't nest yet) && ((subCommand = subCommands.get(args[i])) != null)) { command = subCommand; } else { System.out.println(String.join(" ", subCommands.keySet())); break; } } else if (command instanceof HelpCommand) { command = ((HelpCommand) command).getRootCommand(); // changed command point without parsing extra argument i--; // step back for that extra argument } } } @Override public String getHelp() { return null; } }
Fix bash completion for HelpCommand and fully typed subcommands
Fix bash completion for HelpCommand and fully typed subcommands
Java
mit
sim642/shy,sim642/shy
java
## Code Before: package ee.shy.cli.command; import ee.shy.cli.Command; import ee.shy.cli.SuperCommand; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; public class CompletionCommand implements Command { private final Command rootCommand; public CompletionCommand(Command rootCommand) { this.rootCommand = rootCommand; } @Override public void execute(String[] args) throws IOException { List<String> argsList = new ArrayList<>(Arrays.asList(args)); argsList.add(null); Command command = rootCommand; for (String arg : argsList) { if (command instanceof SuperCommand) { Map<String, Command> subCommands = ((SuperCommand) command).getSubCommands(); Command subCommand = subCommands.get(arg); if (subCommand != null) { command = subCommand; } else { System.out.println(String.join(" ", subCommands.keySet())); break; } } else if (command instanceof HelpCommand) { command = ((HelpCommand) command).getRootCommand(); } } } @Override public String getHelp() { return null; } } ## Instruction: Fix bash completion for HelpCommand and fully typed subcommands ## Code After: package ee.shy.cli.command; import ee.shy.cli.Command; import ee.shy.cli.SuperCommand; import java.io.IOException; import java.util.Map; public class CompletionCommand implements Command { private final Command rootCommand; public CompletionCommand(Command rootCommand) { this.rootCommand = rootCommand; } @Override public void execute(String[] args) throws IOException { Command command = rootCommand; for (int i = 0; i <= args.length; i++) { // loop for one extra time for supercommands without arguments if (command instanceof SuperCommand) { Map<String, Command> subCommands = ((SuperCommand) command).getSubCommands(); Command subCommand; if ((i < args.length - 1) // complete subcommand even if fully typed (don't nest yet) && ((subCommand = subCommands.get(args[i])) != null)) { command = subCommand; } else { System.out.println(String.join(" ", 
subCommands.keySet())); break; } } else if (command instanceof HelpCommand) { command = ((HelpCommand) command).getRootCommand(); // changed command point without parsing extra argument i--; // step back for that extra argument } } } @Override public String getHelp() { return null; } }
// ... existing code ... import ee.shy.cli.SuperCommand; import java.io.IOException; import java.util.Map; public class CompletionCommand implements Command { // ... modified code ... @Override public void execute(String[] args) throws IOException { Command command = rootCommand; for (int i = 0; i <= args.length; i++) { // loop for one extra time for supercommands without arguments if (command instanceof SuperCommand) { Map<String, Command> subCommands = ((SuperCommand) command).getSubCommands(); Command subCommand; if ((i < args.length - 1) // complete subcommand even if fully typed (don't nest yet) && ((subCommand = subCommands.get(args[i])) != null)) { command = subCommand; } else { ... } } else if (command instanceof HelpCommand) { command = ((HelpCommand) command).getRootCommand(); // changed command point without parsing extra argument i--; // step back for that extra argument } } } // ... rest of the code ...
70c659b68235c4d36dd02c255304523710003884
wangle/ssl/SSLStats.h
wangle/ssl/SSLStats.h
/* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ #pragma once namespace wangle { class SSLStats { public: virtual ~SSLStats() noexcept {} // downstream virtual void recordSSLAcceptLatency(int64_t latency) noexcept = 0; virtual void recordTLSTicket(bool ticketNew, bool ticketHit) noexcept = 0; virtual void recordSSLSession(bool sessionNew, bool sessionHit, bool foreign) noexcept = 0; virtual void recordSSLSessionRemove() noexcept = 0; virtual void recordSSLSessionFree(uint32_t freed) noexcept = 0; virtual void recordSSLSessionSetError(uint32_t err) noexcept = 0; virtual void recordSSLSessionGetError(uint32_t err) noexcept = 0; virtual void recordClientRenegotiation() noexcept = 0; // upstream virtual void recordSSLUpstreamConnection(bool handshake) noexcept = 0; virtual void recordSSLUpstreamConnectionError(bool verifyError) noexcept = 0; }; } // namespace wangle
/* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ #pragma once namespace wangle { class SSLStats { public: virtual ~SSLStats() noexcept {} // downstream virtual void recordSSLAcceptLatency(int64_t latency) noexcept = 0; virtual void recordTLSTicket(bool ticketNew, bool ticketHit) noexcept = 0; virtual void recordSSLSession(bool sessionNew, bool sessionHit, bool foreign) noexcept = 0; virtual void recordSSLSessionRemove() noexcept = 0; virtual void recordSSLSessionFree(uint32_t freed) noexcept = 0; virtual void recordSSLSessionSetError(uint32_t err) noexcept = 0; virtual void recordSSLSessionGetError(uint32_t err) noexcept = 0; virtual void recordClientRenegotiation() noexcept = 0; virtual void recordSSLClientCertificateMismatch() noexcept = 0; // upstream virtual void recordSSLUpstreamConnection(bool handshake) noexcept = 0; virtual void recordSSLUpstreamConnectionError(bool verifyError) noexcept = 0; }; } // namespace wangle
Add a counter for client cert verification mismatches
Add a counter for client cert verification mismatches Summary: As per title Reviewed By: yfeldblum Differential Revision: D4665764 fbshipit-source-id: 7051a3958900ac3c3387f1b7e08e45ae38abcc73
C
apache-2.0
facebook/wangle,facebook/wangle,facebook/wangle
c
## Code Before: /* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ #pragma once namespace wangle { class SSLStats { public: virtual ~SSLStats() noexcept {} // downstream virtual void recordSSLAcceptLatency(int64_t latency) noexcept = 0; virtual void recordTLSTicket(bool ticketNew, bool ticketHit) noexcept = 0; virtual void recordSSLSession(bool sessionNew, bool sessionHit, bool foreign) noexcept = 0; virtual void recordSSLSessionRemove() noexcept = 0; virtual void recordSSLSessionFree(uint32_t freed) noexcept = 0; virtual void recordSSLSessionSetError(uint32_t err) noexcept = 0; virtual void recordSSLSessionGetError(uint32_t err) noexcept = 0; virtual void recordClientRenegotiation() noexcept = 0; // upstream virtual void recordSSLUpstreamConnection(bool handshake) noexcept = 0; virtual void recordSSLUpstreamConnectionError(bool verifyError) noexcept = 0; }; } // namespace wangle ## Instruction: Add a counter for client cert verification mismatches Summary: As per title Reviewed By: yfeldblum Differential Revision: D4665764 fbshipit-source-id: 7051a3958900ac3c3387f1b7e08e45ae38abcc73 ## Code After: /* * Copyright (c) 2017, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
* */ #pragma once namespace wangle { class SSLStats { public: virtual ~SSLStats() noexcept {} // downstream virtual void recordSSLAcceptLatency(int64_t latency) noexcept = 0; virtual void recordTLSTicket(bool ticketNew, bool ticketHit) noexcept = 0; virtual void recordSSLSession(bool sessionNew, bool sessionHit, bool foreign) noexcept = 0; virtual void recordSSLSessionRemove() noexcept = 0; virtual void recordSSLSessionFree(uint32_t freed) noexcept = 0; virtual void recordSSLSessionSetError(uint32_t err) noexcept = 0; virtual void recordSSLSessionGetError(uint32_t err) noexcept = 0; virtual void recordClientRenegotiation() noexcept = 0; virtual void recordSSLClientCertificateMismatch() noexcept = 0; // upstream virtual void recordSSLUpstreamConnection(bool handshake) noexcept = 0; virtual void recordSSLUpstreamConnectionError(bool verifyError) noexcept = 0; }; } // namespace wangle
// ... existing code ... virtual void recordSSLSessionSetError(uint32_t err) noexcept = 0; virtual void recordSSLSessionGetError(uint32_t err) noexcept = 0; virtual void recordClientRenegotiation() noexcept = 0; virtual void recordSSLClientCertificateMismatch() noexcept = 0; // upstream virtual void recordSSLUpstreamConnection(bool handshake) noexcept = 0; // ... rest of the code ...
8f2598ac9d730bf0a7c08b9cdb6071fd7b73ba3b
utilities/cassandra/merge_operator.h
utilities/cassandra/merge_operator.h
// Copyright (c) 2017-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). #pragma once #include "rocksdb/merge_operator.h" #include "rocksdb/slice.h" namespace rocksdb { namespace cassandra { /** * A MergeOperator for rocksdb that implements Cassandra row value merge. */ class CassandraValueMergeOperator : public MergeOperator { public: static std::shared_ptr<MergeOperator> CreateSharedInstance(); virtual bool FullMergeV2(const MergeOperationInput& merge_in, MergeOperationOutput* merge_out) const override; virtual bool PartialMergeMulti(const Slice& key, const std::deque<Slice>& operand_list, std::string* new_value, Logger* logger) const override; virtual const char* Name() const override; }; } // namespace cassandra } // namespace rocksdb
// Copyright (c) 2017-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). #pragma once #include "rocksdb/merge_operator.h" #include "rocksdb/slice.h" namespace rocksdb { namespace cassandra { /** * A MergeOperator for rocksdb that implements Cassandra row value merge. */ class CassandraValueMergeOperator : public MergeOperator { public: static std::shared_ptr<MergeOperator> CreateSharedInstance(); virtual bool FullMergeV2(const MergeOperationInput& merge_in, MergeOperationOutput* merge_out) const override; virtual bool PartialMergeMulti(const Slice& key, const std::deque<Slice>& operand_list, std::string* new_value, Logger* logger) const override; virtual const char* Name() const override; virtual bool AllowSingleOperand() const override { return true; } }; } // namespace cassandra } // namespace rocksdb
Enable Cassandra merge operator to be called with a single merge operand
Enable Cassandra merge operator to be called with a single merge operand Summary: Updating Cassandra merge operator to make use of a single merge operand when needed. Single merge operand support has been introduced in #2721. Closes https://github.com/facebook/rocksdb/pull/2753 Differential Revision: D5652867 Pulled By: sagar0 fbshipit-source-id: b9fbd3196d3ebd0b752626dbf9bec9aa53e3e26a
C
bsd-3-clause
bbiao/rocksdb,bbiao/rocksdb,Andymic/rocksdb,Andymic/rocksdb,SunguckLee/RocksDB,Andymic/rocksdb,Andymic/rocksdb,SunguckLee/RocksDB,bbiao/rocksdb,SunguckLee/RocksDB,SunguckLee/RocksDB,Andymic/rocksdb,SunguckLee/RocksDB,bbiao/rocksdb,Andymic/rocksdb,bbiao/rocksdb,bbiao/rocksdb,Andymic/rocksdb,Andymic/rocksdb,bbiao/rocksdb,SunguckLee/RocksDB,SunguckLee/RocksDB,SunguckLee/RocksDB,bbiao/rocksdb
c
## Code Before: // Copyright (c) 2017-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). #pragma once #include "rocksdb/merge_operator.h" #include "rocksdb/slice.h" namespace rocksdb { namespace cassandra { /** * A MergeOperator for rocksdb that implements Cassandra row value merge. */ class CassandraValueMergeOperator : public MergeOperator { public: static std::shared_ptr<MergeOperator> CreateSharedInstance(); virtual bool FullMergeV2(const MergeOperationInput& merge_in, MergeOperationOutput* merge_out) const override; virtual bool PartialMergeMulti(const Slice& key, const std::deque<Slice>& operand_list, std::string* new_value, Logger* logger) const override; virtual const char* Name() const override; }; } // namespace cassandra } // namespace rocksdb ## Instruction: Enable Cassandra merge operator to be called with a single merge operand Summary: Updating Cassandra merge operator to make use of a single merge operand when needed. Single merge operand support has been introduced in #2721. Closes https://github.com/facebook/rocksdb/pull/2753 Differential Revision: D5652867 Pulled By: sagar0 fbshipit-source-id: b9fbd3196d3ebd0b752626dbf9bec9aa53e3e26a ## Code After: // Copyright (c) 2017-present, Facebook, Inc. All rights reserved. // This source code is licensed under both the GPLv2 (found in the // COPYING file in the root directory) and Apache 2.0 License // (found in the LICENSE.Apache file in the root directory). #pragma once #include "rocksdb/merge_operator.h" #include "rocksdb/slice.h" namespace rocksdb { namespace cassandra { /** * A MergeOperator for rocksdb that implements Cassandra row value merge. 
*/ class CassandraValueMergeOperator : public MergeOperator { public: static std::shared_ptr<MergeOperator> CreateSharedInstance(); virtual bool FullMergeV2(const MergeOperationInput& merge_in, MergeOperationOutput* merge_out) const override; virtual bool PartialMergeMulti(const Slice& key, const std::deque<Slice>& operand_list, std::string* new_value, Logger* logger) const override; virtual const char* Name() const override; virtual bool AllowSingleOperand() const override { return true; } }; } // namespace cassandra } // namespace rocksdb
... Logger* logger) const override; virtual const char* Name() const override; virtual bool AllowSingleOperand() const override { return true; } }; } // namespace cassandra } // namespace rocksdb ...
8e26fa46ffdb9442254712b4083a973ab9ce6577
Python/tangshi.py
Python/tangshi.py
import sys import re import codecs ping = re.compile(u'.平') shang = re.compile(u'上聲') ru = re.compile(u'入') qu = re.compile(u'去') mydict = { } # f = open("../Data/TangRhymesMap.csv") f = codecs.open("../Data/TangRhymesMap.csv", "r", "utf-8") for line in f: line = line.rstrip() value, key = line.split(",") #key = char.decode("utf-8") #value = rhyme.decode("utf-8") mydict[key] = value f = codecs.open("../Data/SamplePoem.txt", "r", "utf-8") for line in f: line = line.rstrip() for key in line: if ping.match(mydict[key]): print key + " = " + " Ping" elif shang.match(mydict[key]): print key + " = " + " Shang" elif qu.match(mydict[key]): print key + " = " + " Qu" elif ru.match(mydict[key]): print key + " = " + " Ru" else: print key + " = " + " *"
import sys import re import codecs ping = re.compile(u'.平') shang = re.compile(u'上聲') ru = re.compile(u'入') qu = re.compile(u'去') mydict = { } # f = open("../Data/TangRhymesMap.csv") f = codecs.open("../Data/TangRhymesMap.csv", "r", "utf-8") for line in f: line = line.rstrip() value, key = line.split(",") #key = char.decode("utf-8") #value = rhyme.decode("utf-8") mydict[key] = value f = codecs.open("../Data/SamplePoem.txt", "r", "utf-8") for line in f: line = line.rstrip() for key in line: if key not in mydict: print key elif ping.match(mydict[key]): print key + " = " + " Ping" elif shang.match(mydict[key]): print key + " = " + " Shang" elif qu.match(mydict[key]): print key + " = " + " Qu" elif ru.match(mydict[key]): print key + " = " + " Ru" else: print key + " = " + " *"
Print the character without Rhyme if it is not on the Rhyme Dictionary
Print the character without Rhyme if it is not on the Rhyme Dictionary
Python
apache-2.0
jmworsley/TangShi
python
## Code Before: import sys import re import codecs ping = re.compile(u'.平') shang = re.compile(u'上聲') ru = re.compile(u'入') qu = re.compile(u'去') mydict = { } # f = open("../Data/TangRhymesMap.csv") f = codecs.open("../Data/TangRhymesMap.csv", "r", "utf-8") for line in f: line = line.rstrip() value, key = line.split(",") #key = char.decode("utf-8") #value = rhyme.decode("utf-8") mydict[key] = value f = codecs.open("../Data/SamplePoem.txt", "r", "utf-8") for line in f: line = line.rstrip() for key in line: if ping.match(mydict[key]): print key + " = " + " Ping" elif shang.match(mydict[key]): print key + " = " + " Shang" elif qu.match(mydict[key]): print key + " = " + " Qu" elif ru.match(mydict[key]): print key + " = " + " Ru" else: print key + " = " + " *" ## Instruction: Print the character without Rhyme if it is not on the Rhyme Dictionary ## Code After: import sys import re import codecs ping = re.compile(u'.平') shang = re.compile(u'上聲') ru = re.compile(u'入') qu = re.compile(u'去') mydict = { } # f = open("../Data/TangRhymesMap.csv") f = codecs.open("../Data/TangRhymesMap.csv", "r", "utf-8") for line in f: line = line.rstrip() value, key = line.split(",") #key = char.decode("utf-8") #value = rhyme.decode("utf-8") mydict[key] = value f = codecs.open("../Data/SamplePoem.txt", "r", "utf-8") for line in f: line = line.rstrip() for key in line: if key not in mydict: print key elif ping.match(mydict[key]): print key + " = " + " Ping" elif shang.match(mydict[key]): print key + " = " + " Shang" elif qu.match(mydict[key]): print key + " = " + " Qu" elif ru.match(mydict[key]): print key + " = " + " Ru" else: print key + " = " + " *"
// ... existing code ... for line in f: line = line.rstrip() for key in line: if key not in mydict: print key elif ping.match(mydict[key]): print key + " = " + " Ping" elif shang.match(mydict[key]): print key + " = " + " Shang" // ... rest of the code ...
ca4be3892ec0c1b5bc337a9fae10503b5f7f765a
bika/lims/browser/validation.py
bika/lims/browser/validation.py
from Products.Archetypes.browser.validation import InlineValidationView as _IVV from Acquisition import aq_inner from Products.CMFCore.utils import getToolByName import json SKIP_VALIDATION_FIELDTYPES = ('image', 'file', 'datetime', 'reference') class InlineValidationView(_IVV): def __call__(self, uid, fname, value): '''Validate a given field. Return any error messages. ''' res = {'errmsg': ''} if value not in self.request: return json.dumps(res) rc = getToolByName(aq_inner(self.context), 'reference_catalog') instance = rc.lookupObject(uid) # make sure this works for portal_factory items if instance is None: instance = self.context field = instance.getField(fname) if field and field.type not in SKIP_VALIDATION_FIELDTYPES: return super(InlineValidationView, self).__call__(uid, fname, value) self.request.response.setHeader('Content-Type', 'application/json') return json.dumps(res)
from Products.Archetypes.browser.validation import InlineValidationView as _IVV from Acquisition import aq_inner from Products.CMFCore.utils import getToolByName import json SKIP_VALIDATION_FIELDTYPES = ('image', 'file', 'datetime', 'reference') class InlineValidationView(_IVV): def __call__(self, uid, fname, value): '''Validate a given field. Return any error messages. ''' res = {'errmsg': ''} rc = getToolByName(aq_inner(self.context), 'reference_catalog') instance = rc.lookupObject(uid) # make sure this works for portal_factory items if instance is None: instance = self.context field = instance.getField(fname) if field and field.type not in SKIP_VALIDATION_FIELDTYPES: return super(InlineValidationView, self).__call__(uid, fname, value) self.request.response.setHeader('Content-Type', 'application/json') return json.dumps(res)
Revert "Inline Validation fails silently if request is malformed"
Revert "Inline Validation fails silently if request is malformed" This reverts commit 723e4eb603568d3a60190d8d292cc335a74b79d5.
Python
agpl-3.0
labsanmartin/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,rockfruit/bika.lims,veroc/Bika-LIMS,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS
python
## Code Before: from Products.Archetypes.browser.validation import InlineValidationView as _IVV from Acquisition import aq_inner from Products.CMFCore.utils import getToolByName import json SKIP_VALIDATION_FIELDTYPES = ('image', 'file', 'datetime', 'reference') class InlineValidationView(_IVV): def __call__(self, uid, fname, value): '''Validate a given field. Return any error messages. ''' res = {'errmsg': ''} if value not in self.request: return json.dumps(res) rc = getToolByName(aq_inner(self.context), 'reference_catalog') instance = rc.lookupObject(uid) # make sure this works for portal_factory items if instance is None: instance = self.context field = instance.getField(fname) if field and field.type not in SKIP_VALIDATION_FIELDTYPES: return super(InlineValidationView, self).__call__(uid, fname, value) self.request.response.setHeader('Content-Type', 'application/json') return json.dumps(res) ## Instruction: Revert "Inline Validation fails silently if request is malformed" This reverts commit 723e4eb603568d3a60190d8d292cc335a74b79d5. ## Code After: from Products.Archetypes.browser.validation import InlineValidationView as _IVV from Acquisition import aq_inner from Products.CMFCore.utils import getToolByName import json SKIP_VALIDATION_FIELDTYPES = ('image', 'file', 'datetime', 'reference') class InlineValidationView(_IVV): def __call__(self, uid, fname, value): '''Validate a given field. Return any error messages. ''' res = {'errmsg': ''} rc = getToolByName(aq_inner(self.context), 'reference_catalog') instance = rc.lookupObject(uid) # make sure this works for portal_factory items if instance is None: instance = self.context field = instance.getField(fname) if field and field.type not in SKIP_VALIDATION_FIELDTYPES: return super(InlineValidationView, self).__call__(uid, fname, value) self.request.response.setHeader('Content-Type', 'application/json') return json.dumps(res)
// ... existing code ... ''' res = {'errmsg': ''} rc = getToolByName(aq_inner(self.context), 'reference_catalog') instance = rc.lookupObject(uid) # make sure this works for portal_factory items // ... rest of the code ...
9681b103a8ac4994df5048f3c9114ae5a3368f5a
hackerrank/two-arrays/Solution.java
hackerrank/two-arrays/Solution.java
public class Solution { public static void main(String[] args) throws Exception { } }
import java.io.*; import java.util.Arrays; import java.util.StringTokenizer; /** * @author shengmin */ public class Solution { boolean solve(int[] A, int[] B, int K) { Arrays.sort(A); Arrays.sort(B); int pivot = B.length; int capacity = 0; for (int i = 0; i < A.length; i++) { int a = A[i]; int newPivot = binarySearch(B, a, K, 0, pivot); if (newPivot == -1) { if (--capacity < 0) { return false; } } else { capacity += pivot - newPivot - 1; pivot = newPivot; } } return true; } int binarySearch(int[] B, int a, int K, int start, int end) { int answer = -1; while (start < end) { int midIndex = start + (end - start) / 2; int mid = B[midIndex]; if (mid + a >= K) { answer = midIndex; end = midIndex; } else { start = midIndex + 1; } } return answer; } void run(BufferedReader rd) throws Exception { PrintWriter pw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out))); int T = Integer.parseInt(rd.readLine()); for (int i = 0; i < T; i++) { StringTokenizer st = new StringTokenizer(rd.readLine()); int N = Integer.parseInt(st.nextToken()); int K = Integer.parseInt(st.nextToken()); int[] A = new int[N]; int[] B = new int[N]; st = new StringTokenizer(rd.readLine()); for (int j = 0; j < N; j++) { A[j] = Integer.parseInt(st.nextToken()); } st = new StringTokenizer(rd.readLine()); for (int j = 0; j < N; j++) { B[j] = Integer.parseInt(st.nextToken()); } pw.println(solve(A, B, K) ? "YES" : "NO"); } rd.close(); pw.close(); } public static void main(String[] args) throws Exception { BufferedReader rd = args.length > 0 ? new BufferedReader(new FileReader(args[0])) : new BufferedReader(new InputStreamReader(System.in)); new Solution().run(rd); } }
Add solution to two-arrays problem
Add solution to two-arrays problem
Java
mit
shengmin/coding-problem,shengmin/coding-problem,shengmin/coding-problem,shengmin/coding-problem
java
## Code Before: public class Solution { public static void main(String[] args) throws Exception { } } ## Instruction: Add solution to two-arrays problem ## Code After: import java.io.*; import java.util.Arrays; import java.util.StringTokenizer; /** * @author shengmin */ public class Solution { boolean solve(int[] A, int[] B, int K) { Arrays.sort(A); Arrays.sort(B); int pivot = B.length; int capacity = 0; for (int i = 0; i < A.length; i++) { int a = A[i]; int newPivot = binarySearch(B, a, K, 0, pivot); if (newPivot == -1) { if (--capacity < 0) { return false; } } else { capacity += pivot - newPivot - 1; pivot = newPivot; } } return true; } int binarySearch(int[] B, int a, int K, int start, int end) { int answer = -1; while (start < end) { int midIndex = start + (end - start) / 2; int mid = B[midIndex]; if (mid + a >= K) { answer = midIndex; end = midIndex; } else { start = midIndex + 1; } } return answer; } void run(BufferedReader rd) throws Exception { PrintWriter pw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out))); int T = Integer.parseInt(rd.readLine()); for (int i = 0; i < T; i++) { StringTokenizer st = new StringTokenizer(rd.readLine()); int N = Integer.parseInt(st.nextToken()); int K = Integer.parseInt(st.nextToken()); int[] A = new int[N]; int[] B = new int[N]; st = new StringTokenizer(rd.readLine()); for (int j = 0; j < N; j++) { A[j] = Integer.parseInt(st.nextToken()); } st = new StringTokenizer(rd.readLine()); for (int j = 0; j < N; j++) { B[j] = Integer.parseInt(st.nextToken()); } pw.println(solve(A, B, K) ? "YES" : "NO"); } rd.close(); pw.close(); } public static void main(String[] args) throws Exception { BufferedReader rd = args.length > 0 ? new BufferedReader(new FileReader(args[0])) : new BufferedReader(new InputStreamReader(System.in)); new Solution().run(rd); } }
# ... existing code ... import java.io.*; import java.util.Arrays; import java.util.StringTokenizer; /** * @author shengmin */ public class Solution { boolean solve(int[] A, int[] B, int K) { Arrays.sort(A); Arrays.sort(B); int pivot = B.length; int capacity = 0; for (int i = 0; i < A.length; i++) { int a = A[i]; int newPivot = binarySearch(B, a, K, 0, pivot); if (newPivot == -1) { if (--capacity < 0) { return false; } } else { capacity += pivot - newPivot - 1; pivot = newPivot; } } return true; } int binarySearch(int[] B, int a, int K, int start, int end) { int answer = -1; while (start < end) { int midIndex = start + (end - start) / 2; int mid = B[midIndex]; if (mid + a >= K) { answer = midIndex; end = midIndex; } else { start = midIndex + 1; } } return answer; } void run(BufferedReader rd) throws Exception { PrintWriter pw = new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out))); int T = Integer.parseInt(rd.readLine()); for (int i = 0; i < T; i++) { StringTokenizer st = new StringTokenizer(rd.readLine()); int N = Integer.parseInt(st.nextToken()); int K = Integer.parseInt(st.nextToken()); int[] A = new int[N]; int[] B = new int[N]; st = new StringTokenizer(rd.readLine()); for (int j = 0; j < N; j++) { A[j] = Integer.parseInt(st.nextToken()); } st = new StringTokenizer(rd.readLine()); for (int j = 0; j < N; j++) { B[j] = Integer.parseInt(st.nextToken()); } pw.println(solve(A, B, K) ? "YES" : "NO"); } rd.close(); pw.close(); } public static void main(String[] args) throws Exception { BufferedReader rd = args.length > 0 ? new BufferedReader(new FileReader(args[0])) : new BufferedReader(new InputStreamReader(System.in)); new Solution().run(rd); } } # ... rest of the code ...
e38a3b616df2f5f54ec0161ed8d06737e8ed60b3
sleepybaby-api/src/main/kotlin/org/jordens/sleepybaby/auth/TokenAuthenticationService.kt
sleepybaby-api/src/main/kotlin/org/jordens/sleepybaby/auth/TokenAuthenticationService.kt
package org.jordens.sleepybaby.auth import org.springframework.security.authentication.UsernamePasswordAuthenticationToken import io.jsonwebtoken.Jwts import io.jsonwebtoken.SignatureAlgorithm import org.springframework.security.core.Authentication import java.util.* import javax.servlet.http.HttpServletRequest import javax.servlet.http.HttpServletResponse class TokenAuthenticationService { val EXPIRATIONTIME: Long = 60 * 60 * 1000 // 1 hour val SECRET = "ThisIsASecret" val TOKEN_PREFIX = "Bearer" val HEADER_STRING = "Authorization" fun addAuthentication(res: HttpServletResponse, username: String) { val JWT = Jwts.builder() .setSubject(username) .setExpiration(Date(System.currentTimeMillis() + EXPIRATIONTIME)) .signWith(SignatureAlgorithm.HS512, SECRET) .compact() res.addHeader(HEADER_STRING, TOKEN_PREFIX + " " + JWT) } fun getAuthentication(request: HttpServletRequest): Authentication? { val token = request.getHeader(HEADER_STRING) if (token != null) { // parse the token. val user = Jwts.parser() .setSigningKey(SECRET) .parseClaimsJws(token!!.replace(TOKEN_PREFIX, "")) .body .subject return if (user != null) UsernamePasswordAuthenticationToken(user, null, emptyList()) else null } return null } }
package org.jordens.sleepybaby.auth import org.springframework.security.authentication.UsernamePasswordAuthenticationToken import io.jsonwebtoken.Jwts import io.jsonwebtoken.SignatureAlgorithm import org.springframework.security.core.Authentication import java.util.* import javax.servlet.http.HttpServletRequest import javax.servlet.http.HttpServletResponse class TokenAuthenticationService { val EXPIRATIONTIME: Long = 60 * 60 * 1000 // 1 hour val SECRET = "ThisIsASecret" val TOKEN_PREFIX = "Bearer" val HEADER_STRING = "Authorization" fun addAuthentication(res: HttpServletResponse, username: String) { val JWT = Jwts.builder() .setSubject(username) .setExpiration(Date(System.currentTimeMillis() + EXPIRATIONTIME)) .signWith(SignatureAlgorithm.HS512, SECRET) .compact() res.addHeader(HEADER_STRING, TOKEN_PREFIX + " " + JWT) } fun getAuthentication(request: HttpServletRequest): Authentication? { val token = request.getHeader(HEADER_STRING) if (token != null) { // parse the token. val body = Jwts.parser() .setSigningKey(SECRET) .parseClaimsJws(token!!.replace(TOKEN_PREFIX, "")) .body val user = body.subject val expiration = body.expiration return if (user == null || (expiration != null && expiration.before(Date()))) null else UsernamePasswordAuthenticationToken(user, null, emptyList()) } return null } }
Verify JWT token is not expired
Verify JWT token is not expired
Kotlin
apache-2.0
ajordens/lalas,ajordens/sleepybaby,ajordens/lalas,ajordens/sleepybaby,ajordens/sleepybaby,ajordens/lalas,ajordens/lalas,ajordens/lalas,ajordens/sleepybaby,ajordens/sleepybaby,ajordens/sleepybaby
kotlin
## Code Before: package org.jordens.sleepybaby.auth import org.springframework.security.authentication.UsernamePasswordAuthenticationToken import io.jsonwebtoken.Jwts import io.jsonwebtoken.SignatureAlgorithm import org.springframework.security.core.Authentication import java.util.* import javax.servlet.http.HttpServletRequest import javax.servlet.http.HttpServletResponse class TokenAuthenticationService { val EXPIRATIONTIME: Long = 60 * 60 * 1000 // 1 hour val SECRET = "ThisIsASecret" val TOKEN_PREFIX = "Bearer" val HEADER_STRING = "Authorization" fun addAuthentication(res: HttpServletResponse, username: String) { val JWT = Jwts.builder() .setSubject(username) .setExpiration(Date(System.currentTimeMillis() + EXPIRATIONTIME)) .signWith(SignatureAlgorithm.HS512, SECRET) .compact() res.addHeader(HEADER_STRING, TOKEN_PREFIX + " " + JWT) } fun getAuthentication(request: HttpServletRequest): Authentication? { val token = request.getHeader(HEADER_STRING) if (token != null) { // parse the token. 
val user = Jwts.parser() .setSigningKey(SECRET) .parseClaimsJws(token!!.replace(TOKEN_PREFIX, "")) .body .subject return if (user != null) UsernamePasswordAuthenticationToken(user, null, emptyList()) else null } return null } } ## Instruction: Verify JWT token is not expired ## Code After: package org.jordens.sleepybaby.auth import org.springframework.security.authentication.UsernamePasswordAuthenticationToken import io.jsonwebtoken.Jwts import io.jsonwebtoken.SignatureAlgorithm import org.springframework.security.core.Authentication import java.util.* import javax.servlet.http.HttpServletRequest import javax.servlet.http.HttpServletResponse class TokenAuthenticationService { val EXPIRATIONTIME: Long = 60 * 60 * 1000 // 1 hour val SECRET = "ThisIsASecret" val TOKEN_PREFIX = "Bearer" val HEADER_STRING = "Authorization" fun addAuthentication(res: HttpServletResponse, username: String) { val JWT = Jwts.builder() .setSubject(username) .setExpiration(Date(System.currentTimeMillis() + EXPIRATIONTIME)) .signWith(SignatureAlgorithm.HS512, SECRET) .compact() res.addHeader(HEADER_STRING, TOKEN_PREFIX + " " + JWT) } fun getAuthentication(request: HttpServletRequest): Authentication? { val token = request.getHeader(HEADER_STRING) if (token != null) { // parse the token. val body = Jwts.parser() .setSigningKey(SECRET) .parseClaimsJws(token!!.replace(TOKEN_PREFIX, "")) .body val user = body.subject val expiration = body.expiration return if (user == null || (expiration != null && expiration.before(Date()))) null else UsernamePasswordAuthenticationToken(user, null, emptyList()) } return null } }
# ... existing code ... val token = request.getHeader(HEADER_STRING) if (token != null) { // parse the token. val body = Jwts.parser() .setSigningKey(SECRET) .parseClaimsJws(token!!.replace(TOKEN_PREFIX, "")) .body val user = body.subject val expiration = body.expiration return if (user == null || (expiration != null && expiration.before(Date()))) null else UsernamePasswordAuthenticationToken(user, null, emptyList()) } return null } # ... rest of the code ...
94142e31d4189fbcf152eeb6b9ad89d684f1a6d0
autoload/splicelib/util/io.py
autoload/splicelib/util/io.py
import sys def error(m): sys.stderr.write(str(m) + '\n')
import sys import vim def error(m): sys.stderr.write(str(m) + '\n') def echomsg(m): vim.command('echomsg "%s"' % m)
Add a utility for echoing in the IO utils.
Add a utility for echoing in the IO utils.
Python
mit
sjl/splice.vim,sjl/splice.vim
python
## Code Before: import sys def error(m): sys.stderr.write(str(m) + '\n') ## Instruction: Add a utility for echoing in the IO utils. ## Code After: import sys import vim def error(m): sys.stderr.write(str(m) + '\n') def echomsg(m): vim.command('echomsg "%s"' % m)
// ... existing code ... import sys import vim def error(m): sys.stderr.write(str(m) + '\n') def echomsg(m): vim.command('echomsg "%s"' % m) // ... rest of the code ...
853bf035fcb9ea21e648cb0b1d1b13ee68f8e9cc
importer/tests/test_utils.py
importer/tests/test_utils.py
from unittest import TestCase from importer.utils import find_first class FindFirstTestCase(TestCase): def test_first_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['one', 'four'] ), 'one', ) def test_second_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['two', 'four'] ), 'two', ) def test_none_present(self): self.assertIsNone( find_first( ['one', 'two', 'three'], ['four'] ) )
from unittest import TestCase from importer.utils import ( maybe, find_first, ) class MaybeTestCase(TestCase): def setUp(self): self.add_ten = maybe(lambda x: x + 10) def test_with_none(self): self.assertIsNone(self.add_ten(None)) def test_with_different_value(self): self.assertEqual(self.add_ten(20), 30) class FindFirstTestCase(TestCase): def test_first_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['one', 'four'] ), 'one', ) def test_second_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['two', 'four'] ), 'two', ) def test_none_present(self): self.assertIsNone( find_first( ['one', 'two', 'three'], ['four'] ) )
Add tests for maybe decorator
Add tests for maybe decorator
Python
mit
despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics
python
## Code Before: from unittest import TestCase from importer.utils import find_first class FindFirstTestCase(TestCase): def test_first_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['one', 'four'] ), 'one', ) def test_second_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['two', 'four'] ), 'two', ) def test_none_present(self): self.assertIsNone( find_first( ['one', 'two', 'three'], ['four'] ) ) ## Instruction: Add tests for maybe decorator ## Code After: from unittest import TestCase from importer.utils import ( maybe, find_first, ) class MaybeTestCase(TestCase): def setUp(self): self.add_ten = maybe(lambda x: x + 10) def test_with_none(self): self.assertIsNone(self.add_ten(None)) def test_with_different_value(self): self.assertEqual(self.add_ten(20), 30) class FindFirstTestCase(TestCase): def test_first_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['one', 'four'] ), 'one', ) def test_second_in_haystack(self): self.assertEqual( find_first( ['one', 'two', 'three'], ['two', 'four'] ), 'two', ) def test_none_present(self): self.assertIsNone( find_first( ['one', 'two', 'three'], ['four'] ) )
// ... existing code ... from unittest import TestCase from importer.utils import ( maybe, find_first, ) class MaybeTestCase(TestCase): def setUp(self): self.add_ten = maybe(lambda x: x + 10) def test_with_none(self): self.assertIsNone(self.add_ten(None)) def test_with_different_value(self): self.assertEqual(self.add_ten(20), 30) class FindFirstTestCase(TestCase): // ... rest of the code ...
ca06bf1d52cd51ccec178c98ad407bfe59f1ada1
strobe.py
strobe.py
import RPi.GPIO as GPIO from time import sleep def onoff(period, pin): """Symmetric square wave, equal time on/off""" half_cycle = period / 2.0 GPIO.output(pin, GPIO.HIGH) sleep(half_cycle) GPIO.output(pin, GPIO.LOW) sleep(half_cycle) def strobe(freq, dur, pin): nflashes = freq * dur seconds_to_sleep = 1.0 / freq # Use Raspberry-Pi board pin numbers. In other words, 11 means pin # number 11, not GPIO 11. GPIO.setmode(GPIO.BOARD) GPIO.setup(pin, GPIO.OUT) # requires root? for i in range(nflashes): onoff(seconds_to_sleep, pin) GPIO.cleanup()
import RPi.GPIO as GPIO from time import sleep def onoff(ontime, offtime, pin): GPIO.output(pin, GPIO.HIGH) sleep(ontime) GPIO.output(pin, GPIO.LOW) sleep(offtime) def strobe(freq, dur, pin): nflashes = freq * dur period = 1.0 / freq # Use Raspberry-Pi board pin numbers. In other words, 11 means pin # number 11, not GPIO 11. GPIO.setmode(GPIO.BOARD) GPIO.setup(pin, GPIO.OUT) # requires root? for i in range(nflashes): onoff(period/2.0, period/2.0, pin) GPIO.cleanup()
Make onoff function more versatile
Make onoff function more versatile
Python
mit
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
python
## Code Before: import RPi.GPIO as GPIO from time import sleep def onoff(period, pin): """Symmetric square wave, equal time on/off""" half_cycle = period / 2.0 GPIO.output(pin, GPIO.HIGH) sleep(half_cycle) GPIO.output(pin, GPIO.LOW) sleep(half_cycle) def strobe(freq, dur, pin): nflashes = freq * dur seconds_to_sleep = 1.0 / freq # Use Raspberry-Pi board pin numbers. In other words, 11 means pin # number 11, not GPIO 11. GPIO.setmode(GPIO.BOARD) GPIO.setup(pin, GPIO.OUT) # requires root? for i in range(nflashes): onoff(seconds_to_sleep, pin) GPIO.cleanup() ## Instruction: Make onoff function more versatile ## Code After: import RPi.GPIO as GPIO from time import sleep def onoff(ontime, offtime, pin): GPIO.output(pin, GPIO.HIGH) sleep(ontime) GPIO.output(pin, GPIO.LOW) sleep(offtime) def strobe(freq, dur, pin): nflashes = freq * dur period = 1.0 / freq # Use Raspberry-Pi board pin numbers. In other words, 11 means pin # number 11, not GPIO 11. GPIO.setmode(GPIO.BOARD) GPIO.setup(pin, GPIO.OUT) # requires root? for i in range(nflashes): onoff(period/2.0, period/2.0, pin) GPIO.cleanup()
... import RPi.GPIO as GPIO from time import sleep def onoff(ontime, offtime, pin): GPIO.output(pin, GPIO.HIGH) sleep(ontime) GPIO.output(pin, GPIO.LOW) sleep(offtime) def strobe(freq, dur, pin): nflashes = freq * dur period = 1.0 / freq # Use Raspberry-Pi board pin numbers. In other words, 11 means pin # number 11, not GPIO 11. ... GPIO.setup(pin, GPIO.OUT) # requires root? for i in range(nflashes): onoff(period/2.0, period/2.0, pin) GPIO.cleanup() ...
d3bfb0d65314df39a42390dd5a7d40dd7a61b758
myname.py
myname.py
"""Little module to find the path of a Cosmo box simulation""" import os.path as path base=path.expanduser("~/data/Cosmo/") def get_name(sim, ff=True): """Get the directory for a simulation""" halo = "Cosmo"+str(sim)+"_V6" if ff: halo=path.join(halo,"L25n512/output") else: halo=path.join(halo,"L25n256") return path.join(base, halo)
"""Little module to find the path of a Cosmo box simulation""" import os.path as path base=path.expanduser("~/data/Cosmo/") def get_name(sim, ff=True, box=25): """Get the directory for a simulation""" halo = "Cosmo"+str(sim)+"_V6" if ff: halo=path.join(halo,"L"+str(box)+"n512/output") else: halo=path.join(halo,"L"+str(box)+"256") return path.join(base, halo)
Allow loading of different box sizes
Allow loading of different box sizes
Python
mit
sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra
python
## Code Before: """Little module to find the path of a Cosmo box simulation""" import os.path as path base=path.expanduser("~/data/Cosmo/") def get_name(sim, ff=True): """Get the directory for a simulation""" halo = "Cosmo"+str(sim)+"_V6" if ff: halo=path.join(halo,"L25n512/output") else: halo=path.join(halo,"L25n256") return path.join(base, halo) ## Instruction: Allow loading of different box sizes ## Code After: """Little module to find the path of a Cosmo box simulation""" import os.path as path base=path.expanduser("~/data/Cosmo/") def get_name(sim, ff=True, box=25): """Get the directory for a simulation""" halo = "Cosmo"+str(sim)+"_V6" if ff: halo=path.join(halo,"L"+str(box)+"n512/output") else: halo=path.join(halo,"L"+str(box)+"256") return path.join(base, halo)
// ... existing code ... base=path.expanduser("~/data/Cosmo/") def get_name(sim, ff=True, box=25): """Get the directory for a simulation""" halo = "Cosmo"+str(sim)+"_V6" if ff: halo=path.join(halo,"L"+str(box)+"n512/output") else: halo=path.join(halo,"L"+str(box)+"256") return path.join(base, halo) // ... rest of the code ...
21193559b063e85f26971d5ae6181a0bd097cda3
tests/utilities_test.py
tests/utilities_test.py
import pytest import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n"
import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ############ # Vector # ############ @pyop.vector def multFirstColumn(column): img = column.reshape((2, 2), order = 'C') img[:, 0] *= 2 return img.flatten(0) def testVectorOnMatrix(): np.testing.assert_allclose( multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) def testVectorOnVector(): np.testing.assert_allclose( multFirstColumn(np.array([1, 1, 1, 1])), np.array(np.array([2, 1, 2, 1])))
Test vector, passes matrix and vector input.
Test vector, passes matrix and vector input.
Python
bsd-3-clause
ryanorendorff/pyop
python
## Code Before: import pytest import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ## Instruction: Test vector, passes matrix and vector input. ## Code After: import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ############ # Vector # ############ @pyop.vector def multFirstColumn(column): img = column.reshape((2, 2), order = 'C') img[:, 0] *= 2 return img.flatten(0) def testVectorOnMatrix(): np.testing.assert_allclose( multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) def testVectorOnVector(): np.testing.assert_allclose( multFirstColumn(np.array([1, 1, 1, 1])), np.array(np.array([2, 1, 2, 1])))
... import pyop import numpy as np ... np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ############ # Vector # ############ @pyop.vector def multFirstColumn(column): img = column.reshape((2, 2), order = 'C') img[:, 0] *= 2 return img.flatten(0) def testVectorOnMatrix(): np.testing.assert_allclose( multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) def testVectorOnVector(): np.testing.assert_allclose( multFirstColumn(np.array([1, 1, 1, 1])), np.array(np.array([2, 1, 2, 1]))) ...
b6bf01a5c95da0de1e6831a3cf41243e69297854
setup.py
setup.py
try: import multiprocessing except ImportError: pass import setuptools import ryu.hooks ryu.hooks.save_orig() setuptools.setup(name='ryu', setup_requires=['pbr'], pbr=True)
import setuptools import ryu.hooks ryu.hooks.save_orig() setuptools.setup(name='ryu', setup_requires=['pbr'], pbr=True)
Remove workaround for issue with older python versions.
Remove workaround for issue with older python versions.
Python
apache-2.0
osrg/ryu,osrg/ryu,osrg/ryu,osrg/ryu,osrg/ryu
python
## Code Before: try: import multiprocessing except ImportError: pass import setuptools import ryu.hooks ryu.hooks.save_orig() setuptools.setup(name='ryu', setup_requires=['pbr'], pbr=True) ## Instruction: Remove workaround for issue with older python versions. ## Code After: import setuptools import ryu.hooks ryu.hooks.save_orig() setuptools.setup(name='ryu', setup_requires=['pbr'], pbr=True)
// ... existing code ... import setuptools import ryu.hooks // ... rest of the code ...
e4ea9426a75828c6fce924b895ee3e4603595dc7
tests/templates/components/test_radios_with_images.py
tests/templates/components/test_radios_with_images.py
import json from importlib import metadata from packaging.version import Version def test_govuk_frontend_jinja_overrides_on_design_system_v3(): with open("package.json") as package_file: package_json = json.load(package_file) govuk_frontend_version = Version(package_json["dependencies"]["govuk-frontend"]) govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja")) # This should be checking govuk_frontend_version == 3.14.x, but we're not there yet. Update this when we are. # Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/ correct_govuk_frontend_version = Version("3.0.0") <= govuk_frontend_version < Version("4.0.0") correct_govuk_frontend_jinja_version = Version("1.5.0") <= govuk_frontend_jinja_version < Version("1.6.0") assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, ( "After upgrading either of the Design System packages, you must validate that " "`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`" "are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the " "rendering process." )
import json from importlib import metadata from packaging.version import Version def test_govuk_frontend_jinja_overrides_on_design_system_v3(): with open("package.json") as package_file: package_json = json.load(package_file) govuk_frontend_version = Version(package_json["dependencies"]["govuk-frontend"]) govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja")) # Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/ correct_govuk_frontend_version = Version("3.14.0") == govuk_frontend_version correct_govuk_frontend_jinja_version = Version("1.5.1") == govuk_frontend_jinja_version assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, ( "After upgrading either of the Design System packages, you must validate that " "`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`" "are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the " "rendering process." )
Update test for GOVUK Frontend libraries parity
Update test for GOVUK Frontend libraries parity
Python
mit
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
python
## Code Before: import json from importlib import metadata from packaging.version import Version def test_govuk_frontend_jinja_overrides_on_design_system_v3(): with open("package.json") as package_file: package_json = json.load(package_file) govuk_frontend_version = Version(package_json["dependencies"]["govuk-frontend"]) govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja")) # This should be checking govuk_frontend_version == 3.14.x, but we're not there yet. Update this when we are. # Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/ correct_govuk_frontend_version = Version("3.0.0") <= govuk_frontend_version < Version("4.0.0") correct_govuk_frontend_jinja_version = Version("1.5.0") <= govuk_frontend_jinja_version < Version("1.6.0") assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, ( "After upgrading either of the Design System packages, you must validate that " "`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`" "are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the " "rendering process." 
) ## Instruction: Update test for GOVUK Frontend libraries parity ## Code After: import json from importlib import metadata from packaging.version import Version def test_govuk_frontend_jinja_overrides_on_design_system_v3(): with open("package.json") as package_file: package_json = json.load(package_file) govuk_frontend_version = Version(package_json["dependencies"]["govuk-frontend"]) govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja")) # Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/ correct_govuk_frontend_version = Version("3.14.0") == govuk_frontend_version correct_govuk_frontend_jinja_version = Version("1.5.1") == govuk_frontend_jinja_version assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, ( "After upgrading either of the Design System packages, you must validate that " "`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`" "are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the " "rendering process." )
// ... existing code ... govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja")) # Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/ correct_govuk_frontend_version = Version("3.14.0") == govuk_frontend_version correct_govuk_frontend_jinja_version = Version("1.5.1") == govuk_frontend_jinja_version assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, ( "After upgrading either of the Design System packages, you must validate that " // ... rest of the code ...
932ee2737b822742996f234c90b715771fb876bf
tests/functional/api/view_pdf_test.py
tests/functional/api/view_pdf_test.py
import pytest from tests.conftest import assert_cache_control class TestViewPDFAPI: def test_caching_is_disabled(self, test_app): response = test_app.get("/pdf?url=http://example.com/foo.pdf") assert_cache_control( response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"] )
from tests.conftest import assert_cache_control class TestViewPDFAPI: def test_caching_is_disabled(self, test_app): response = test_app.get("/pdf?url=http://example.com/foo.pdf") assert_cache_control( response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"] )
Fix lint errors after adding missing __init__ files
Fix lint errors after adding missing __init__ files
Python
bsd-2-clause
hypothesis/via,hypothesis/via,hypothesis/via
python
## Code Before: import pytest from tests.conftest import assert_cache_control class TestViewPDFAPI: def test_caching_is_disabled(self, test_app): response = test_app.get("/pdf?url=http://example.com/foo.pdf") assert_cache_control( response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"] ) ## Instruction: Fix lint errors after adding missing __init__ files ## Code After: from tests.conftest import assert_cache_control class TestViewPDFAPI: def test_caching_is_disabled(self, test_app): response = test_app.get("/pdf?url=http://example.com/foo.pdf") assert_cache_control( response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"] )
# ... existing code ... from tests.conftest import assert_cache_control # ... rest of the code ...
c7efd5976f511200162610612fcd5b6f9b013a54
dciclient/v1/utils.py
dciclient/v1/utils.py
import click import json import six def flatten(d, prefix=''): ret = [] for k, v in d.items(): p = k if not prefix else prefix + '.' + k if isinstance(v, dict): ret += flatten(v, prefix=p) else: ret.append("%s=%s" % (p, v)) return ret def print_json(result_json): formatted_result = json.dumps(result_json, indent=4) click.echo(formatted_result) def sanitize_kwargs(**kwargs): kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v) try: kwargs['data'] = json.loads(kwargs['data']) except KeyError: pass return kwargs
import click import json import six def flatten(d, prefix=''): ret = [] for k, v in d.items(): p = k if not prefix else prefix + '.' + k if isinstance(v, dict): ret += flatten(v, prefix=p) else: ret.append("%s=%s" % (p, v)) return ret def print_json(result_json): formatted_result = json.dumps(result_json, indent=4) click.echo(formatted_result) def sanitize_kwargs(**kwargs): kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v) try: kwargs['data'] = json.loads(kwargs['data']) except KeyError: pass except TypeError: pass return kwargs
Fix TypeError exception when parsing json
Fix TypeError exception when parsing json This change fixes the TypeError exception that is raised when it should not while parsing json File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads return _default_decoder.decode(s) File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode obj, end = self.raw_decode(s, idx=_w(s, 0).end()) TypeError: expected string or buffer Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93
Python
apache-2.0
redhat-cip/python-dciclient,redhat-cip/python-dciclient
python
## Code Before: import click import json import six def flatten(d, prefix=''): ret = [] for k, v in d.items(): p = k if not prefix else prefix + '.' + k if isinstance(v, dict): ret += flatten(v, prefix=p) else: ret.append("%s=%s" % (p, v)) return ret def print_json(result_json): formatted_result = json.dumps(result_json, indent=4) click.echo(formatted_result) def sanitize_kwargs(**kwargs): kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v) try: kwargs['data'] = json.loads(kwargs['data']) except KeyError: pass return kwargs ## Instruction: Fix TypeError exception when parsing json This change fixes the TypeError exception that is raised when it should not while parsing json File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads return _default_decoder.decode(s) File "/usr/lib64/python2.7/json/decoder.py", line 366, in decode obj, end = self.raw_decode(s, idx=_w(s, 0).end()) TypeError: expected string or buffer Change-Id: I1b9670adcc505084fecb54a45ce11029dc8a4d93 ## Code After: import click import json import six def flatten(d, prefix=''): ret = [] for k, v in d.items(): p = k if not prefix else prefix + '.' + k if isinstance(v, dict): ret += flatten(v, prefix=p) else: ret.append("%s=%s" % (p, v)) return ret def print_json(result_json): formatted_result = json.dumps(result_json, indent=4) click.echo(formatted_result) def sanitize_kwargs(**kwargs): kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v) try: kwargs['data'] = json.loads(kwargs['data']) except KeyError: pass except TypeError: pass return kwargs
# ... existing code ... kwargs['data'] = json.loads(kwargs['data']) except KeyError: pass except TypeError: pass return kwargs # ... rest of the code ...
e01b0c9129c05e366605639553201f0dc2af2756
django_fsm_log/apps.py
django_fsm_log/apps.py
from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" default_auto_field = 'django.db.models.BigAutoField' def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
Revert "Solve warning coming from django 4.0"
Revert "Solve warning coming from django 4.0"
Python
mit
gizmag/django-fsm-log,ticosax/django-fsm-log
python
## Code Before: from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" default_auto_field = 'django.db.models.BigAutoField' def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback) ## Instruction: Revert "Solve warning coming from django 4.0" ## Code After: from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
// ... existing code ... class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) // ... rest of the code ...
ca50295c71432dde32eff813e5bd05b7a8e40ad1
cdflib/__init__.py
cdflib/__init__.py
import os from . import cdfread from . import cdfwrite from .epochs import CDFepoch as cdfepoch # This function determines if we are reading or writing a file def CDF(path, cdf_spec=None, delete=False, validate=None): if (os.path.exists(path)): if delete: os.remove(path) return else: return cdfread.CDF(path, validate=validate) else: return cdfwrite.CDF(path, cdf_spec=cdf_spec, delete=delete)
import os from . import cdfread from . import cdfwrite from .epochs import CDFepoch as cdfepoch # This function determines if we are reading or writing a file def CDF(path, cdf_spec=None, delete=False, validate=None): path = os.path.expanduser(path) if (os.path.exists(path)): if delete: os.remove(path) return else: return cdfread.CDF(path, validate=validate) else: return cdfwrite.CDF(path, cdf_spec=cdf_spec, delete=delete)
Expand user path when reading CDF
Expand user path when reading CDF
Python
mit
MAVENSDC/cdflib
python
## Code Before: import os from . import cdfread from . import cdfwrite from .epochs import CDFepoch as cdfepoch # This function determines if we are reading or writing a file def CDF(path, cdf_spec=None, delete=False, validate=None): if (os.path.exists(path)): if delete: os.remove(path) return else: return cdfread.CDF(path, validate=validate) else: return cdfwrite.CDF(path, cdf_spec=cdf_spec, delete=delete) ## Instruction: Expand user path when reading CDF ## Code After: import os from . import cdfread from . import cdfwrite from .epochs import CDFepoch as cdfepoch # This function determines if we are reading or writing a file def CDF(path, cdf_spec=None, delete=False, validate=None): path = os.path.expanduser(path) if (os.path.exists(path)): if delete: os.remove(path) return else: return cdfread.CDF(path, validate=validate) else: return cdfwrite.CDF(path, cdf_spec=cdf_spec, delete=delete)
// ... existing code ... def CDF(path, cdf_spec=None, delete=False, validate=None): path = os.path.expanduser(path) if (os.path.exists(path)): if delete: os.remove(path) // ... rest of the code ...
f794c6ed1f6be231d79ac35759ad76270c3e14e0
brains/mapping/admin.py
brains/mapping/admin.py
from django.contrib import admin from mapping.models import Location, Report class LocationAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ( ('name', 'suburb'), ('x', 'y'), 'building_type' )} ),) list_display = ['name', 'x', 'y', 'suburb'] list_filter = ['suburb'] search_fields = ['name'] readonly_fields = ['x', 'y', 'name', 'building_type', 'suburb'] actions = None def has_add_permission(self, request): return False class ReportAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ('location', ('zombies_only', 'inside'), ('is_ruined', 'is_illuminated', 'has_tree'), ('zombies_present', 'barricade_level'), 'players', ('reported_by', 'origin', 'reported_date') )} ),) readonly_fields = ['players', 'reported_date'] admin.site.register(Location, LocationAdmin) admin.site.register(Report, ReportAdmin)
from django.contrib import admin from mapping.models import Location, Report class LocationAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ( ('name', 'suburb'), ('x', 'y'), 'building_type' )} ),) list_display = ['name', 'x', 'y', 'suburb'] list_filter = ['suburb'] search_fields = ['name'] readonly_fields = ['x', 'y', 'name', 'building_type', 'suburb'] actions = None def has_add_permission(self, request): return False class ReportAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ('location', ('zombies_only', 'inside'), ('is_ruined', 'is_illuminated', 'has_tree'), ('zombies_present', 'barricade_level'), 'players', ('reported_by', 'origin'), 'reported_date', )} ),) readonly_fields = ['location', 'zombies_only', 'inside', 'is_ruined', 'is_illuminated', 'has_tree', 'zombies_present', 'barricade_level', 'players', 'reported_by', 'origin', 'reported_date'] admin.site.register(Location, LocationAdmin) admin.site.register(Report, ReportAdmin)
Set everything on the report read only.
Set everything on the report read only.
Python
bsd-3-clause
crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains
python
## Code Before: from django.contrib import admin from mapping.models import Location, Report class LocationAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ( ('name', 'suburb'), ('x', 'y'), 'building_type' )} ),) list_display = ['name', 'x', 'y', 'suburb'] list_filter = ['suburb'] search_fields = ['name'] readonly_fields = ['x', 'y', 'name', 'building_type', 'suburb'] actions = None def has_add_permission(self, request): return False class ReportAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ('location', ('zombies_only', 'inside'), ('is_ruined', 'is_illuminated', 'has_tree'), ('zombies_present', 'barricade_level'), 'players', ('reported_by', 'origin', 'reported_date') )} ),) readonly_fields = ['players', 'reported_date'] admin.site.register(Location, LocationAdmin) admin.site.register(Report, ReportAdmin) ## Instruction: Set everything on the report read only. ## Code After: from django.contrib import admin from mapping.models import Location, Report class LocationAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ( ('name', 'suburb'), ('x', 'y'), 'building_type' )} ),) list_display = ['name', 'x', 'y', 'suburb'] list_filter = ['suburb'] search_fields = ['name'] readonly_fields = ['x', 'y', 'name', 'building_type', 'suburb'] actions = None def has_add_permission(self, request): return False class ReportAdmin(admin.ModelAdmin): fieldsets = ((None, {'fields': ('location', ('zombies_only', 'inside'), ('is_ruined', 'is_illuminated', 'has_tree'), ('zombies_present', 'barricade_level'), 'players', ('reported_by', 'origin'), 'reported_date', )} ),) readonly_fields = ['location', 'zombies_only', 'inside', 'is_ruined', 'is_illuminated', 'has_tree', 'zombies_present', 'barricade_level', 'players', 'reported_by', 'origin', 'reported_date'] admin.site.register(Location, LocationAdmin) admin.site.register(Report, ReportAdmin)
# ... existing code ... ('is_ruined', 'is_illuminated', 'has_tree'), ('zombies_present', 'barricade_level'), 'players', ('reported_by', 'origin'), 'reported_date', )} ),) readonly_fields = ['location', 'zombies_only', 'inside', 'is_ruined', 'is_illuminated', 'has_tree', 'zombies_present', 'barricade_level', 'players', 'reported_by', 'origin', 'reported_date'] admin.site.register(Location, LocationAdmin) # ... rest of the code ...
25355d2169bb43eb07b9a60a2f9e4c0436cf906a
vm/external_libs/libtommath/bn_mp_exch.c
vm/external_libs/libtommath/bn_mp_exch.c
/* LibTomMath, multiple-precision integer library -- Tom St Denis * * LibTomMath is a library that provides multiple-precision * integer arithmetic as well as number theoretic functionality. * * The library was designed directly after the MPI library by * Michael Fromberger but has been written from scratch with * additional optimizations in place. * * The library is free for all purposes without any express * guarantee it works. * * Tom St Denis, [email protected], http://libtom.org */ /* swap the elements of two integers, for cases where you can't simply swap the * mp_int pointers around */ void mp_exch MPA(mp_int * a, mp_int * b) { mp_int t; if(MANAGED(a) || MANAGED(b)) { mp_init_copy(MPST, &t, a); mp_copy(MPST, a, b); mp_copy(MPST, b, &t); mp_clear(&t); return; } t = *a; *a = *b; *b = t; } #endif /* $Source: /cvs/libtom/libtommath/bn_mp_exch.c,v $ */ /* $Revision: 1.4 $ */ /* $Date: 2006/12/28 01:25:13 $ */
/* LibTomMath, multiple-precision integer library -- Tom St Denis * * LibTomMath is a library that provides multiple-precision * integer arithmetic as well as number theoretic functionality. * * The library was designed directly after the MPI library by * Michael Fromberger but has been written from scratch with * additional optimizations in place. * * The library is free for all purposes without any express * guarantee it works. * * Tom St Denis, [email protected], http://libtom.org */ /* swap the elements of two integers, for cases where you can't simply swap the * mp_int pointers around */ void mp_exch MPA(mp_int * a, mp_int * b) { mp_int t; if(MANAGED(a) || MANAGED(b)) { mp_init(&t); // copy a to t mp_copy(MPST, a, &t); // copy b to a mp_copy(MPST, b, a); // copy t to b mp_copy(MPST, &t, b); mp_clear(&t); return; } t = *a; *a = *b; *b = t; } #endif /* $Source: /cvs/libtom/libtommath/bn_mp_exch.c,v $ */ /* $Revision: 1.4 $ */ /* $Date: 2006/12/28 01:25:13 $ */
Fix argument order in mp_exch
Fix argument order in mp_exch
C
bsd-3-clause
heftig/rubinius,Wirachmat/rubinius,dblock/rubinius,Azizou/rubinius,heftig/rubinius,Azizou/rubinius,ruipserra/rubinius,travis-repos/rubinius,jemc/rubinius,slawosz/rubinius,Wirachmat/rubinius,slawosz/rubinius,dblock/rubinius,pH14/rubinius,ngpestelos/rubinius,travis-repos/rubinius,benlovell/rubinius,lgierth/rubinius,pH14/rubinius,ngpestelos/rubinius,dblock/rubinius,Wirachmat/rubinius,travis-repos/rubinius,kachick/rubinius,jsyeo/rubinius,digitalextremist/rubinius,heftig/rubinius,sferik/rubinius,heftig/rubinius,slawosz/rubinius,sferik/rubinius,jemc/rubinius,ruipserra/rubinius,sferik/rubinius,jsyeo/rubinius,jemc/rubinius,lgierth/rubinius,digitalextremist/rubinius,jemc/rubinius,slawosz/rubinius,kachick/rubinius,lgierth/rubinius,jsyeo/rubinius,ngpestelos/rubinius,ngpestelos/rubinius,ruipserra/rubinius,ruipserra/rubinius,digitalextremist/rubinius,kachick/rubinius,ngpestelos/rubinius,pH14/rubinius,ruipserra/rubinius,kachick/rubinius,benlovell/rubinius,digitalextremist/rubinius,sferik/rubinius,lgierth/rubinius,lgierth/rubinius,travis-repos/rubinius,jsyeo/rubinius,Azizou/rubinius,jemc/rubinius,Azizou/rubinius,pH14/rubinius,benlovell/rubinius,kachick/rubinius,benlovell/rubinius,Wirachmat/rubinius,travis-repos/rubinius,mlarraz/rubinius,Wirachmat/rubinius,pH14/rubinius,Azizou/rubinius,slawosz/rubinius,mlarraz/rubinius,dblock/rubinius,ruipserra/rubinius,jemc/rubinius,digitalextremist/rubinius,kachick/rubinius,pH14/rubinius,Wirachmat/rubinius,benlovell/rubinius,mlarraz/rubinius,slawosz/rubinius,heftig/rubinius,sferik/rubinius,heftig/rubinius,benlovell/rubinius,lgierth/rubinius,jsyeo/rubinius,dblock/rubinius,jsyeo/rubinius,kachick/rubinius,travis-repos/rubinius,ngpestelos/rubinius,sferik/rubinius,mlarraz/rubinius,lgierth/rubinius,ruipserra/rubinius,benlovell/rubinius,Azizou/rubinius,digitalextremist/rubinius,ngpestelos/rubinius,slawosz/rubinius,digitalextremist/rubinius,dblock/rubinius,mlarraz/rubinius,jsyeo/rubinius,kachick/rubinius,heftig/rubinius,jemc/rubinius,pH14/rubinius,travis
-repos/rubinius,mlarraz/rubinius,dblock/rubinius,sferik/rubinius,Azizou/rubinius,mlarraz/rubinius,Wirachmat/rubinius
c
## Code Before: /* LibTomMath, multiple-precision integer library -- Tom St Denis * * LibTomMath is a library that provides multiple-precision * integer arithmetic as well as number theoretic functionality. * * The library was designed directly after the MPI library by * Michael Fromberger but has been written from scratch with * additional optimizations in place. * * The library is free for all purposes without any express * guarantee it works. * * Tom St Denis, [email protected], http://libtom.org */ /* swap the elements of two integers, for cases where you can't simply swap the * mp_int pointers around */ void mp_exch MPA(mp_int * a, mp_int * b) { mp_int t; if(MANAGED(a) || MANAGED(b)) { mp_init_copy(MPST, &t, a); mp_copy(MPST, a, b); mp_copy(MPST, b, &t); mp_clear(&t); return; } t = *a; *a = *b; *b = t; } #endif /* $Source: /cvs/libtom/libtommath/bn_mp_exch.c,v $ */ /* $Revision: 1.4 $ */ /* $Date: 2006/12/28 01:25:13 $ */ ## Instruction: Fix argument order in mp_exch ## Code After: /* LibTomMath, multiple-precision integer library -- Tom St Denis * * LibTomMath is a library that provides multiple-precision * integer arithmetic as well as number theoretic functionality. * * The library was designed directly after the MPI library by * Michael Fromberger but has been written from scratch with * additional optimizations in place. * * The library is free for all purposes without any express * guarantee it works. * * Tom St Denis, [email protected], http://libtom.org */ /* swap the elements of two integers, for cases where you can't simply swap the * mp_int pointers around */ void mp_exch MPA(mp_int * a, mp_int * b) { mp_int t; if(MANAGED(a) || MANAGED(b)) { mp_init(&t); // copy a to t mp_copy(MPST, a, &t); // copy b to a mp_copy(MPST, b, a); // copy t to b mp_copy(MPST, &t, b); mp_clear(&t); return; } t = *a; *a = *b; *b = t; } #endif /* $Source: /cvs/libtom/libtommath/bn_mp_exch.c,v $ */ /* $Revision: 1.4 $ */ /* $Date: 2006/12/28 01:25:13 $ */
// ... existing code ... mp_int t; if(MANAGED(a) || MANAGED(b)) { mp_init(&t); // copy a to t mp_copy(MPST, a, &t); // copy b to a mp_copy(MPST, b, a); // copy t to b mp_copy(MPST, &t, b); mp_clear(&t); return; } // ... rest of the code ...
447f19638c43cf273b6922796a203d33407bc29e
test/util.py
test/util.py
'''Helper code for theanets unit tests.''' import numpy as np class MNIST(object): NUM_DIGITS = 100 DIGIT_SIZE = 784 def setUp(self): # we just create some random "mnist digit" data of the right shape. np.random.seed(3) self.images = np.random.randn(NUM_DIGITS, DIGIT_SIZE).astype('f') self.labels = np.random.randint(0, 10, NUM_DIGITS).astype('i')
'''Helper code for theanets unit tests.''' import numpy as np class MNIST(object): NUM_DIGITS = 100 DIGIT_SIZE = 784 def setUp(self): # we just create some random "mnist digit" data of the right shape. np.random.seed(3) self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f') self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i')
Use proper namespace for constants.
Use proper namespace for constants.
Python
mit
chrinide/theanets,devdoer/theanets,lmjohns3/theanets
python
## Code Before: '''Helper code for theanets unit tests.''' import numpy as np class MNIST(object): NUM_DIGITS = 100 DIGIT_SIZE = 784 def setUp(self): # we just create some random "mnist digit" data of the right shape. np.random.seed(3) self.images = np.random.randn(NUM_DIGITS, DIGIT_SIZE).astype('f') self.labels = np.random.randint(0, 10, NUM_DIGITS).astype('i') ## Instruction: Use proper namespace for constants. ## Code After: '''Helper code for theanets unit tests.''' import numpy as np class MNIST(object): NUM_DIGITS = 100 DIGIT_SIZE = 784 def setUp(self): # we just create some random "mnist digit" data of the right shape. np.random.seed(3) self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f') self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i')
# ... existing code ... def setUp(self): # we just create some random "mnist digit" data of the right shape. np.random.seed(3) self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f') self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i') # ... rest of the code ...
5d7c85d33f8e51947d8791bd597720f161a48f82
game/views.py
game/views.py
from django.shortcuts import render from django.http import HttpResponse def index(request): context = {'text': 'Welcome to our game'} return render(request, 'game/index.html', context) def users(request): context = {'text': 'User list here'} return render(request, 'game/users.html', context) def user_detail(request, user_id): return HttpResponse("This page will have user details") def leaderboard(request): context = {'text': 'Leaderboard goes here'} return render(request, 'game/leaderboard.html', context)
from django.shortcuts import render from django.http import HttpResponse def index(request): context = {'text': 'Welcome to our game'} return render(request, 'game/index.html', context) def register(request): context = {'text': 'Register here'} return render(request, 'registration/register.html', context) def users(request): context = {'text': 'User list here'} return render(request, 'game/users.html', context) def user_detail(request, user_id): return HttpResponse("This page will have user details") def leaderboard(request): context = {'text': 'Leaderboard goes here'} return render(request, 'game/leaderboard.html', context)
Fix merge conflict with master repo
Fix merge conflict with master repo
Python
mit
shintouki/augmented-pandemic,shintouki/augmented-pandemic,shintouki/augmented-pandemic
python
## Code Before: from django.shortcuts import render from django.http import HttpResponse def index(request): context = {'text': 'Welcome to our game'} return render(request, 'game/index.html', context) def users(request): context = {'text': 'User list here'} return render(request, 'game/users.html', context) def user_detail(request, user_id): return HttpResponse("This page will have user details") def leaderboard(request): context = {'text': 'Leaderboard goes here'} return render(request, 'game/leaderboard.html', context) ## Instruction: Fix merge conflict with master repo ## Code After: from django.shortcuts import render from django.http import HttpResponse def index(request): context = {'text': 'Welcome to our game'} return render(request, 'game/index.html', context) def register(request): context = {'text': 'Register here'} return render(request, 'registration/register.html', context) def users(request): context = {'text': 'User list here'} return render(request, 'game/users.html', context) def user_detail(request, user_id): return HttpResponse("This page will have user details") def leaderboard(request): context = {'text': 'Leaderboard goes here'} return render(request, 'game/leaderboard.html', context)
# ... existing code ... def index(request): context = {'text': 'Welcome to our game'} return render(request, 'game/index.html', context) def register(request): context = {'text': 'Register here'} return render(request, 'registration/register.html', context) def users(request): context = {'text': 'User list here'} # ... rest of the code ...
6c0e6e79c05b95001ad4c7c1f6ab3b505ffbd6a5
examples/comparator_example.py
examples/comparator_example.py
import pprint import maec.bindings.maec_bundle as maec_bundle_binding from maec.bundle.bundle import Bundle # Matching properties dictionary match_on_dictionary = {'FileObjectType': ['file_name'], 'WindowsRegistryKeyObjectType': ['hive', 'values.name/data'], 'WindowsMutexObjectType': ['name']} # Parse in the input Bundle documents and create their python-maec Bundle class representations bundle1 = Bundle.from_obj(maec_bundle_binding.parse("zeus_anubis_maec.xml")) bundle2 = Bundle.from_obj(maec_bundle_binding.parse("zeus_threatexpert_maec.xml")) # Perform the comparison and get the results comparison_results = Bundle.compare([bundle1, bundle2], match_on = match_on_dictionary, case_sensitive = False) # Pretty print the common and unique Objects print "******Common Objects:*******\n" pprint.pprint(comparison_results.get_common()) print "****************************" print "******Unique Objects:*******\n" pprint.pprint(comparison_results.get_unique()) print "****************************"
import pprint import maec.bindings.maec_bundle as maec_bundle_binding from maec.bundle.bundle import Bundle # Matching properties dictionary match_on_dictionary = {'FileObjectType': ['full_name'], 'WindowsRegistryKeyObjectType': ['hive', 'values.name/data'], 'WindowsMutexObjectType': ['name']} # Parse in the input Bundle documents and create their python-maec Bundle class representations bundle1 = Bundle.from_obj(maec_bundle_binding.parse("zeus_threatexpert_maec.xml")) bundle2 = Bundle.from_obj(maec_bundle_binding.parse("zeus_anubis_maec.xml")) # Perform the comparison and get the results comparison_results = Bundle.compare([bundle1, bundle2], match_on = match_on_dictionary, case_sensitive = False) # Pretty print the common and unique Objects print "******Common Objects:*******\n" pprint.pprint(comparison_results.get_common()) print "****************************" print "******Unique Objects:*******\n" pprint.pprint(comparison_results.get_unique()) print "****************************"
Change comparison parameter in example
Change comparison parameter in example
Python
bsd-3-clause
MAECProject/python-maec
python
## Code Before: import pprint import maec.bindings.maec_bundle as maec_bundle_binding from maec.bundle.bundle import Bundle # Matching properties dictionary match_on_dictionary = {'FileObjectType': ['file_name'], 'WindowsRegistryKeyObjectType': ['hive', 'values.name/data'], 'WindowsMutexObjectType': ['name']} # Parse in the input Bundle documents and create their python-maec Bundle class representations bundle1 = Bundle.from_obj(maec_bundle_binding.parse("zeus_anubis_maec.xml")) bundle2 = Bundle.from_obj(maec_bundle_binding.parse("zeus_threatexpert_maec.xml")) # Perform the comparison and get the results comparison_results = Bundle.compare([bundle1, bundle2], match_on = match_on_dictionary, case_sensitive = False) # Pretty print the common and unique Objects print "******Common Objects:*******\n" pprint.pprint(comparison_results.get_common()) print "****************************" print "******Unique Objects:*******\n" pprint.pprint(comparison_results.get_unique()) print "****************************" ## Instruction: Change comparison parameter in example ## Code After: import pprint import maec.bindings.maec_bundle as maec_bundle_binding from maec.bundle.bundle import Bundle # Matching properties dictionary match_on_dictionary = {'FileObjectType': ['full_name'], 'WindowsRegistryKeyObjectType': ['hive', 'values.name/data'], 'WindowsMutexObjectType': ['name']} # Parse in the input Bundle documents and create their python-maec Bundle class representations bundle1 = Bundle.from_obj(maec_bundle_binding.parse("zeus_threatexpert_maec.xml")) bundle2 = Bundle.from_obj(maec_bundle_binding.parse("zeus_anubis_maec.xml")) # Perform the comparison and get the results comparison_results = Bundle.compare([bundle1, bundle2], match_on = match_on_dictionary, case_sensitive = False) # Pretty print the common and unique Objects print "******Common Objects:*******\n" pprint.pprint(comparison_results.get_common()) print "****************************" print "******Unique Objects:*******\n" 
pprint.pprint(comparison_results.get_unique()) print "****************************"
// ... existing code ... import maec.bindings.maec_bundle as maec_bundle_binding from maec.bundle.bundle import Bundle # Matching properties dictionary match_on_dictionary = {'FileObjectType': ['full_name'], 'WindowsRegistryKeyObjectType': ['hive', 'values.name/data'], 'WindowsMutexObjectType': ['name']} # Parse in the input Bundle documents and create their python-maec Bundle class representations bundle1 = Bundle.from_obj(maec_bundle_binding.parse("zeus_threatexpert_maec.xml")) bundle2 = Bundle.from_obj(maec_bundle_binding.parse("zeus_anubis_maec.xml")) # Perform the comparison and get the results comparison_results = Bundle.compare([bundle1, bundle2], match_on = match_on_dictionary, case_sensitive = False) # Pretty print the common and unique Objects // ... rest of the code ...
416575ca3cc684925be0391b43b98a9fa1d9f909
ObjectTracking/testTrack.py
ObjectTracking/testTrack.py
from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display # Open reference video cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video') # Select reference image img=cam.getFrame(50) modelImage = img.crop(255, 180, 70, 20) modelImage = Image('kite_detail.jpg') ts = [] disp=Display() for i in range(0,50): img = cam.getImage() while (disp.isNotDone()): img = cam.getImage() bb = (255, 180, 70, 20) ts = img.track("camshift",ts,modelImage,bb, num_frames = 1) # now here in first loop iteration since ts is empty, # img0 and bb will be considered. # New tracking object will be created and added in ts (TrackSet) # After first iteration, ts is not empty and hence the previous # image frames and bounding box will be taken from ts and img0 # and bb will be ignored. ts.drawPath() img.show()
from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display, Color # Open reference video cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video') # Select reference image img=cam.getFrame(50) modelImage = img.crop(255, 180, 70, 20) modelImage = Image('kite_detail.jpg') ts = [] disp=Display() for i in range(0,50): img = cam.getImage() while (disp.isNotDone()): img = cam.getImage() bb = (255, 180, 70, 20) ts = img.track("camshift",ts,modelImage,bb, num_frames = 1) modelImage = Image('kite_detail.jpg') # now here in first loop iteration since ts is empty, # img0 and bb will be considered. # New tracking object will be created and added in ts (TrackSet) # After first iteration, ts is not empty and hence the previous # image frames and bounding box will be taken from ts and img0 # and bb will be ignored. ts.draw() ts.drawBB() ts.showCoordinates() img.show()
Save the image of the selection (to be able to reinitialise later)
Save the image of the selection (to be able to reinitialise later)
Python
mit
baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite
python
## Code Before: from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display # Open reference video cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video') # Select reference image img=cam.getFrame(50) modelImage = img.crop(255, 180, 70, 20) modelImage = Image('kite_detail.jpg') ts = [] disp=Display() for i in range(0,50): img = cam.getImage() while (disp.isNotDone()): img = cam.getImage() bb = (255, 180, 70, 20) ts = img.track("camshift",ts,modelImage,bb, num_frames = 1) # now here in first loop iteration since ts is empty, # img0 and bb will be considered. # New tracking object will be created and added in ts (TrackSet) # After first iteration, ts is not empty and hence the previous # image frames and bounding box will be taken from ts and img0 # and bb will be ignored. ts.drawPath() img.show() ## Instruction: Save the image of the selection (to be able to reinitialise later) ## Code After: from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display, Color # Open reference video cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video') # Select reference image img=cam.getFrame(50) modelImage = img.crop(255, 180, 70, 20) modelImage = Image('kite_detail.jpg') ts = [] disp=Display() for i in range(0,50): img = cam.getImage() while (disp.isNotDone()): img = cam.getImage() bb = (255, 180, 70, 20) ts = img.track("camshift",ts,modelImage,bb, num_frames = 1) modelImage = Image('kite_detail.jpg') # now here in first loop iteration since ts is empty, # img0 and bb will be considered. # New tracking object will be created and added in ts (TrackSet) # After first iteration, ts is not empty and hence the previous # image frames and bounding box will be taken from ts and img0 # and bb will be ignored. ts.draw() ts.drawBB() ts.showCoordinates() img.show()
... from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display, Color # Open reference video cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video') ... img = cam.getImage() bb = (255, 180, 70, 20) ts = img.track("camshift",ts,modelImage,bb, num_frames = 1) modelImage = Image('kite_detail.jpg') # now here in first loop iteration since ts is empty, # img0 and bb will be considered. # New tracking object will be created and added in ts (TrackSet) ... # After first iteration, ts is not empty and hence the previous # image frames and bounding box will be taken from ts and img0 # and bb will be ignored. ts.draw() ts.drawBB() ts.showCoordinates() img.show() ...
e1bc92abaf23002c37b9a8b7e5bf12b175be1a40
tools/translate.py
tools/translate.py
import re import os abspath = os.path.abspath(__file__) dname = os.path.dirname(abspath) os.chdir(dname) path = '../web/l10n/' files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')] for f in files: f = path + f print 'en -> ' + f[-5:-3] dict = {} for line in open(f).read().splitlines(): match = re.search(" (\\w+): '(.+)'(,)?", line) if match: dict[match.group(1)] = match.group(2) out = open(f, 'w') for line in open(path + 'en.js').read().splitlines(): match = re.search(" (\\w+): '(.+)'(,)?", line) if match: if dict.has_key(match.group(1)): value = dict[match.group(1)] else: print '"' + match.group(2) + '"' value = match.group(2) + ' (*)' out.write(' ' + match.group(1) + ": '" + value + "'") if match.group(3) is not None: out.write(',') out.write('\n') else: out.write(line + '\n')
import os import optparse import urllib2 import json import base64 parser = optparse.OptionParser() parser.add_option("-u", "--user", dest="username", help="transifex user login") parser.add_option("-p", "--password", dest="password", help="transifex user password") (options, args) = parser.parse_args() if not options.username or not options.password: parser.error('User name and password are required') os.chdir(os.path.dirname(os.path.abspath(__file__))) path = "../web/l10n/" def request(url): req = urllib2.Request(url) auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "") req.add_header("Authorization", "Basic %s" % auth) return urllib2.urlopen(req) resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details")) for language in resource["available_languages"]: code = language["code"] data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file") file = open(path + code + ".json", "wb") file.write(data.read()) file.close()
Use transifex service for tranlation
Use transifex service for tranlation
Python
apache-2.0
joseant/traccar-1,vipien/traccar,tananaev/traccar,jon-stumpf/traccar,jon-stumpf/traccar,al3x1s/traccar,AnshulJain1985/Roadcast-Tracker,joseant/traccar-1,AnshulJain1985/Roadcast-Tracker,al3x1s/traccar,tsmgeek/traccar,tsmgeek/traccar,ninioe/traccar,jon-stumpf/traccar,5of9/traccar,tananaev/traccar,orcoliver/traccar,tananaev/traccar,orcoliver/traccar,renaudallard/traccar,duke2906/traccar,jssenyange/traccar,jssenyange/traccar,ninioe/traccar,ninioe/traccar,stalien/traccar_test,renaudallard/traccar,duke2906/traccar,vipien/traccar,stalien/traccar_test,5of9/traccar,jssenyange/traccar,orcoliver/traccar,tsmgeek/traccar
python
## Code Before: import re import os abspath = os.path.abspath(__file__) dname = os.path.dirname(abspath) os.chdir(dname) path = '../web/l10n/' files = [f for f in os.listdir(path) if os.path.isfile(path + f) and f.endswith('.js') and not f.endswith('en.js')] for f in files: f = path + f print 'en -> ' + f[-5:-3] dict = {} for line in open(f).read().splitlines(): match = re.search(" (\\w+): '(.+)'(,)?", line) if match: dict[match.group(1)] = match.group(2) out = open(f, 'w') for line in open(path + 'en.js').read().splitlines(): match = re.search(" (\\w+): '(.+)'(,)?", line) if match: if dict.has_key(match.group(1)): value = dict[match.group(1)] else: print '"' + match.group(2) + '"' value = match.group(2) + ' (*)' out.write(' ' + match.group(1) + ": '" + value + "'") if match.group(3) is not None: out.write(',') out.write('\n') else: out.write(line + '\n') ## Instruction: Use transifex service for tranlation ## Code After: import os import optparse import urllib2 import json import base64 parser = optparse.OptionParser() parser.add_option("-u", "--user", dest="username", help="transifex user login") parser.add_option("-p", "--password", dest="password", help="transifex user password") (options, args) = parser.parse_args() if not options.username or not options.password: parser.error('User name and password are required') os.chdir(os.path.dirname(os.path.abspath(__file__))) path = "../web/l10n/" def request(url): req = urllib2.Request(url) auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "") req.add_header("Authorization", "Basic %s" % auth) return urllib2.urlopen(req) resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details")) for language in resource["available_languages"]: code = language["code"] data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file") file = open(path + code + ".json", "wb") file.write(data.read()) file.close()
// ... existing code ... import os import optparse import urllib2 import json import base64 parser = optparse.OptionParser() parser.add_option("-u", "--user", dest="username", help="transifex user login") parser.add_option("-p", "--password", dest="password", help="transifex user password") (options, args) = parser.parse_args() if not options.username or not options.password: parser.error('User name and password are required') os.chdir(os.path.dirname(os.path.abspath(__file__))) path = "../web/l10n/" def request(url): req = urllib2.Request(url) auth = base64.encodestring("%s:%s" % (options.username, options.password)).replace("\n", "") req.add_header("Authorization", "Basic %s" % auth) return urllib2.urlopen(req) resource = json.load(request("https://www.transifex.com/api/2/project/traccar/resource/web/?details")) for language in resource["available_languages"]: code = language["code"] data = request("https://www.transifex.com/api/2/project/traccar/resource/web/translation/" + code + "?file") file = open(path + code + ".json", "wb") file.write(data.read()) file.close() // ... rest of the code ...
c4c2ca46421f642a05f12aa5d0ab7af8313c7df0
extobjc/extobjc.h
extobjc/extobjc.h
/* * extobjc.h * extobjc * * Created by Justin Spahr-Summers on 2010-11-09. * Released into the public domain. */ #import "EXTADT.h" #import "EXTAspect.h" #import "EXTBlockMethod.h" #import "EXTBlockTarget.h" #import "EXTConcreteProtocol.h" #import "EXTDispatchObject.h" #import "EXTFinalMethod.h" #import "EXTKeyPathCoding.h" #import "EXTMaybe.h" #import "EXTMixin.h" #import "EXTMultiObject.h" #import "EXTNil.h" #import "EXTPrivateMethod.h" #import "EXTProtocolCategory.h" #import "EXTSafeCategory.h" #import "EXTScope.h" #import "EXTSwizzle.h" #import "EXTTuple.h" #import "EXTVarargs.h" #import "NSInvocation+EXT.h" #import "NSMethodSignature+EXT.h"
/* * extobjc.h * extobjc * * Created by Justin Spahr-Summers on 2010-11-09. * Released into the public domain. */ #import "EXTADT.h" #import "EXTAspect.h" #import "EXTBlockMethod.h" #import "EXTBlockTarget.h" #import "EXTConcreteProtocol.h" #import "EXTDispatchObject.h" #import "EXTFinalMethod.h" #import "EXTKeyPathCoding.h" #import "EXTMaybe.h" #import "EXTMixin.h" #import "EXTMultimethod.h" #import "EXTMultiObject.h" #import "EXTNil.h" #import "EXTPrivateMethod.h" #import "EXTProtocolCategory.h" #import "EXTSafeCategory.h" #import "EXTScope.h" #import "EXTSwizzle.h" #import "EXTTuple.h" #import "EXTVarargs.h" #import "NSInvocation+EXT.h" #import "NSMethodSignature+EXT.h"
Add EXTMultimethod to umbrella header
Add EXTMultimethod to umbrella header
C
mit
sandyway/libextobjc,kolyuchiy/libextobjc,WPDreamMelody/libextobjc,sunfei/libextobjc,goodheart/libextobjc,bboyesc/libextobjc,sanojnambiar/libextobjc,telly/libextobjc,jiakai-lian/libextobjc,liuruxian/libextobjc,KBvsMJ/libextobjc
c
## Code Before: /* * extobjc.h * extobjc * * Created by Justin Spahr-Summers on 2010-11-09. * Released into the public domain. */ #import "EXTADT.h" #import "EXTAspect.h" #import "EXTBlockMethod.h" #import "EXTBlockTarget.h" #import "EXTConcreteProtocol.h" #import "EXTDispatchObject.h" #import "EXTFinalMethod.h" #import "EXTKeyPathCoding.h" #import "EXTMaybe.h" #import "EXTMixin.h" #import "EXTMultiObject.h" #import "EXTNil.h" #import "EXTPrivateMethod.h" #import "EXTProtocolCategory.h" #import "EXTSafeCategory.h" #import "EXTScope.h" #import "EXTSwizzle.h" #import "EXTTuple.h" #import "EXTVarargs.h" #import "NSInvocation+EXT.h" #import "NSMethodSignature+EXT.h" ## Instruction: Add EXTMultimethod to umbrella header ## Code After: /* * extobjc.h * extobjc * * Created by Justin Spahr-Summers on 2010-11-09. * Released into the public domain. */ #import "EXTADT.h" #import "EXTAspect.h" #import "EXTBlockMethod.h" #import "EXTBlockTarget.h" #import "EXTConcreteProtocol.h" #import "EXTDispatchObject.h" #import "EXTFinalMethod.h" #import "EXTKeyPathCoding.h" #import "EXTMaybe.h" #import "EXTMixin.h" #import "EXTMultimethod.h" #import "EXTMultiObject.h" #import "EXTNil.h" #import "EXTPrivateMethod.h" #import "EXTProtocolCategory.h" #import "EXTSafeCategory.h" #import "EXTScope.h" #import "EXTSwizzle.h" #import "EXTTuple.h" #import "EXTVarargs.h" #import "NSInvocation+EXT.h" #import "NSMethodSignature+EXT.h"
... #import "EXTKeyPathCoding.h" #import "EXTMaybe.h" #import "EXTMixin.h" #import "EXTMultimethod.h" #import "EXTMultiObject.h" #import "EXTNil.h" #import "EXTPrivateMethod.h" ...
fa458a1f7c574c0fe8d40dff4c1af418c365b8ad
src/main.c
src/main.c
int main(int argc, char** argv) { int tun_fd; char buf[100]; char *dev = calloc(10, 1); CLEAR(buf); tun_fd = tun_alloc(dev); if (set_if_up(dev) != 0) { printf("ERROR when setting up if\n"); } if (set_if_address(dev, "10.0.0.5/24") != 0) { printf("ERROR when setting address for if\n"); }; if (set_if_route(dev, "10.0.0.0/24") != 0) { printf("ERROR when setting route for if\n"); } read(tun_fd, buf, 100); print_hexdump(buf, 100); free(dev); }
int main(int argc, char** argv) { int tun_fd; char buf[100]; char *dev = calloc(10, 1); CLEAR(buf); tun_fd = tun_alloc(dev); if (set_if_up(dev) != 0) { printf("ERROR when setting up if\n"); } if (set_if_address(dev, "10.0.0.5/24") != 0) { printf("ERROR when setting address for if\n"); }; if (set_if_route(dev, "10.0.0.0/24") != 0) { printf("ERROR when setting route for if\n"); } while (1) { read(tun_fd, buf, 100); print_hexdump(buf, 100); } free(dev); }
Read from tun buffer and print hexdump in loop
Read from tun buffer and print hexdump in loop
C
mit
saminiir/level-ip,saminiir/level-ip
c
## Code Before: int main(int argc, char** argv) { int tun_fd; char buf[100]; char *dev = calloc(10, 1); CLEAR(buf); tun_fd = tun_alloc(dev); if (set_if_up(dev) != 0) { printf("ERROR when setting up if\n"); } if (set_if_address(dev, "10.0.0.5/24") != 0) { printf("ERROR when setting address for if\n"); }; if (set_if_route(dev, "10.0.0.0/24") != 0) { printf("ERROR when setting route for if\n"); } read(tun_fd, buf, 100); print_hexdump(buf, 100); free(dev); } ## Instruction: Read from tun buffer and print hexdump in loop ## Code After: int main(int argc, char** argv) { int tun_fd; char buf[100]; char *dev = calloc(10, 1); CLEAR(buf); tun_fd = tun_alloc(dev); if (set_if_up(dev) != 0) { printf("ERROR when setting up if\n"); } if (set_if_address(dev, "10.0.0.5/24") != 0) { printf("ERROR when setting address for if\n"); }; if (set_if_route(dev, "10.0.0.0/24") != 0) { printf("ERROR when setting route for if\n"); } while (1) { read(tun_fd, buf, 100); print_hexdump(buf, 100); } free(dev); }
# ... existing code ... printf("ERROR when setting route for if\n"); } while (1) { read(tun_fd, buf, 100); print_hexdump(buf, 100); } free(dev); } # ... rest of the code ...
9d19fb7ada5caaa2dc74736cd12635bed3d8516a
setup.py
setup.py
import os from setuptools import setup, find_packages setup(name='morepath', version = '0.1dev', description="A micro web-framework with superpowers", author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'venusian', 'reg', 'werkzeug', ], extras_require = dict( test=['pytest >= 2.0', 'pytest-cov'], ), )
import os from setuptools import setup, find_packages setup(name='morepath', version = '0.1dev', description="A micro web-framework with superpowers", author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'venusian >= 1.0a8', 'reg', 'werkzeug >= 0.9.4', ], extras_require = dict( test=['pytest >= 2.0', 'pytest-cov'], ), )
Put in some version requirements.
Put in some version requirements.
Python
bsd-3-clause
taschini/morepath,faassen/morepath,morepath/morepath
python
## Code Before: import os from setuptools import setup, find_packages setup(name='morepath', version = '0.1dev', description="A micro web-framework with superpowers", author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'venusian', 'reg', 'werkzeug', ], extras_require = dict( test=['pytest >= 2.0', 'pytest-cov'], ), ) ## Instruction: Put in some version requirements. ## Code After: import os from setuptools import setup, find_packages setup(name='morepath', version = '0.1dev', description="A micro web-framework with superpowers", author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'venusian >= 1.0a8', 'reg', 'werkzeug >= 0.9.4', ], extras_require = dict( test=['pytest >= 2.0', 'pytest-cov'], ), )
// ... existing code ... zip_safe=False, install_requires=[ 'setuptools', 'venusian >= 1.0a8', 'reg', 'werkzeug >= 0.9.4', ], extras_require = dict( test=['pytest >= 2.0', // ... rest of the code ...
fe451116ffcb12621600310b6d4ca9b6316494ff
scripts/zpe.py
scripts/zpe.py
import logging from vaspy.iter import OutCar _logger = logging.getLogger("vaspy.script") if "__main__" == __name__: outcar = OutCar() poscar = outcar.poscar freq_types = outcar.freq_types # Frequency info. _logger.info("{:<10s}{:<20s}".format("atom", "freq_type")) _logger.info("-"*25) idx = 0 tfs = poscar.tf.tolist() for atom_idx, tf in enumerate(tfs): if tf == ["T", "T", "T"]: _logger.info("{:<10d}{:<5s}{:<5s}{:<5s}".format(atom_idx+1, *freq_types[idx])) idx += 1 # Zero point energy. _logger.info("") _logger.info("ZPE = {}".format(outcar.zpe))
import logging from vaspy.iter import OutCar _logger = logging.getLogger("vaspy.script") if "__main__" == __name__: outcar = OutCar() poscar = outcar.poscar freq_types = outcar.freq_types # Frequency info. _logger.info("{:<10s}{:<10s}{:<20s}".format("atom", "type", "freq_type")) _logger.info("-"*35) # Get atom types. atom_types = [] for t, n in zip(poscar.atoms, poscar.atoms_num): atom_types += [t]*n idx = 0 tfs = poscar.tf.tolist() for atom_idx, tf in enumerate(tfs): if tf == ["T", "T", "T"]: msg = "{:<10d}{:<10s}{:<5s}{:<5s}{:<5s}" msg = msg.format(atom_idx+1, atom_types[atom_idx], *freq_types[idx]) _logger.info(msg) idx += 1 # Zero point energy. _logger.info("") _logger.info("ZPE = {}".format(outcar.zpe))
Add atom type info output.
Add atom type info output.
Python
mit
PytLab/VASPy,PytLab/VASPy
python
## Code Before: import logging from vaspy.iter import OutCar _logger = logging.getLogger("vaspy.script") if "__main__" == __name__: outcar = OutCar() poscar = outcar.poscar freq_types = outcar.freq_types # Frequency info. _logger.info("{:<10s}{:<20s}".format("atom", "freq_type")) _logger.info("-"*25) idx = 0 tfs = poscar.tf.tolist() for atom_idx, tf in enumerate(tfs): if tf == ["T", "T", "T"]: _logger.info("{:<10d}{:<5s}{:<5s}{:<5s}".format(atom_idx+1, *freq_types[idx])) idx += 1 # Zero point energy. _logger.info("") _logger.info("ZPE = {}".format(outcar.zpe)) ## Instruction: Add atom type info output. ## Code After: import logging from vaspy.iter import OutCar _logger = logging.getLogger("vaspy.script") if "__main__" == __name__: outcar = OutCar() poscar = outcar.poscar freq_types = outcar.freq_types # Frequency info. _logger.info("{:<10s}{:<10s}{:<20s}".format("atom", "type", "freq_type")) _logger.info("-"*35) # Get atom types. atom_types = [] for t, n in zip(poscar.atoms, poscar.atoms_num): atom_types += [t]*n idx = 0 tfs = poscar.tf.tolist() for atom_idx, tf in enumerate(tfs): if tf == ["T", "T", "T"]: msg = "{:<10d}{:<10s}{:<5s}{:<5s}{:<5s}" msg = msg.format(atom_idx+1, atom_types[atom_idx], *freq_types[idx]) _logger.info(msg) idx += 1 # Zero point energy. _logger.info("") _logger.info("ZPE = {}".format(outcar.zpe))
... freq_types = outcar.freq_types # Frequency info. _logger.info("{:<10s}{:<10s}{:<20s}".format("atom", "type", "freq_type")) _logger.info("-"*35) # Get atom types. atom_types = [] for t, n in zip(poscar.atoms, poscar.atoms_num): atom_types += [t]*n idx = 0 tfs = poscar.tf.tolist() for atom_idx, tf in enumerate(tfs): if tf == ["T", "T", "T"]: msg = "{:<10d}{:<10s}{:<5s}{:<5s}{:<5s}" msg = msg.format(atom_idx+1, atom_types[atom_idx], *freq_types[idx]) _logger.info(msg) idx += 1 # Zero point energy. ...
c7689244b6de2cc9a01568e7cdab543cf8790214
setup.py
setup.py
import os import subprocess from distutils.core import setup try: if os.path.exists(".git"): s = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out = s.communicate()[0] GIT_REVISION = out.strip() else: GIT_REVISION = "" except WindowsError: GIT_REVISION = "" FULL_VERSION = '0.0.1dev' if "dev" in FULL_VERSION: RELEASED = False VERSION = FULL_VERSION+GIT_REVISION[:7] else: RELEASED = True VERSION = FULL_VERSION def generate_version_py(filename): cnt = """\ # This file was autogenerated version = '%s' git_revision = '%s' """ cnt = cnt % (VERSION, GIT_REVISION) f = open(filename, "w") try: f.write(cnt) finally: f.close() setup( name='SAHGutils', version=VERSION, author='Scott Sinclair', author_email='[email protected]', packages=['sahgutils'], license='LICENSE.txt', description='Useful tools for data analysis and plots.', long_description=open('README.txt').read(), ) if __name__ == '__main__': generate_version_py("sahgutils/__dev_version.py")
import os import subprocess from distutils.core import setup try: if os.path.exists(".git"): s = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out = s.communicate()[0] GIT_REVISION = out.strip() else: GIT_REVISION = "unknown" except WindowsError: GIT_REVISION = "unknown" FULL_VERSION = '0.0.1-dev' if "dev" in FULL_VERSION: RELEASED = False VERSION = FULL_VERSION + '-' + GIT_REVISION[:7] else: RELEASED = True VERSION = FULL_VERSION def generate_version_py(filename): cnt = """\ # This file was autogenerated version = '%s' git_revision = '%s' """ cnt = cnt % (VERSION, GIT_REVISION) f = open(filename, "w") try: f.write(cnt) finally: f.close() setup( name='SAHGutils', version=VERSION, author='Scott Sinclair', author_email='[email protected]', packages=['sahgutils'], license='LICENSE.txt', description='Useful tools for data analysis and plots.', long_description=open('README.txt').read(), ) if __name__ == '__main__': generate_version_py("sahgutils/__dev_version.py")
Improve the version information string
ENH: Improve the version information string
Python
bsd-3-clause
sahg/SAHGutils
python
## Code Before: import os import subprocess from distutils.core import setup try: if os.path.exists(".git"): s = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out = s.communicate()[0] GIT_REVISION = out.strip() else: GIT_REVISION = "" except WindowsError: GIT_REVISION = "" FULL_VERSION = '0.0.1dev' if "dev" in FULL_VERSION: RELEASED = False VERSION = FULL_VERSION+GIT_REVISION[:7] else: RELEASED = True VERSION = FULL_VERSION def generate_version_py(filename): cnt = """\ # This file was autogenerated version = '%s' git_revision = '%s' """ cnt = cnt % (VERSION, GIT_REVISION) f = open(filename, "w") try: f.write(cnt) finally: f.close() setup( name='SAHGutils', version=VERSION, author='Scott Sinclair', author_email='[email protected]', packages=['sahgutils'], license='LICENSE.txt', description='Useful tools for data analysis and plots.', long_description=open('README.txt').read(), ) if __name__ == '__main__': generate_version_py("sahgutils/__dev_version.py") ## Instruction: ENH: Improve the version information string ## Code After: import os import subprocess from distutils.core import setup try: if os.path.exists(".git"): s = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out = s.communicate()[0] GIT_REVISION = out.strip() else: GIT_REVISION = "unknown" except WindowsError: GIT_REVISION = "unknown" FULL_VERSION = '0.0.1-dev' if "dev" in FULL_VERSION: RELEASED = False VERSION = FULL_VERSION + '-' + GIT_REVISION[:7] else: RELEASED = True VERSION = FULL_VERSION def generate_version_py(filename): cnt = """\ # This file was autogenerated version = '%s' git_revision = '%s' """ cnt = cnt % (VERSION, GIT_REVISION) f = open(filename, "w") try: f.write(cnt) finally: f.close() setup( name='SAHGutils', version=VERSION, author='Scott Sinclair', author_email='[email protected]', packages=['sahgutils'], license='LICENSE.txt', description='Useful tools for data analysis and plots.', 
long_description=open('README.txt').read(), ) if __name__ == '__main__': generate_version_py("sahgutils/__dev_version.py")
# ... existing code ... out = s.communicate()[0] GIT_REVISION = out.strip() else: GIT_REVISION = "unknown" except WindowsError: GIT_REVISION = "unknown" FULL_VERSION = '0.0.1-dev' if "dev" in FULL_VERSION: RELEASED = False VERSION = FULL_VERSION + '-' + GIT_REVISION[:7] else: RELEASED = True VERSION = FULL_VERSION # ... rest of the code ...
7c88ecf10c3197c337990c7f92c7ace6a85d316e
setup.py
setup.py
from distutils.core import setup from distutils.core import Extension setup(name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], )
import os from distutils.core import setup from distutils.core import Extension with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') with_extensions = (with_extensions.lower() != 'false') setup_kwargs = dict( name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ) setup_extension_kwargs = dict( ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) if with_extensions: setup_kwargs.update(setup_extension_kwargs) setup(**setup_kwargs)
Make compilation of extensions optional through an environment variable.
Make compilation of extensions optional through an environment variable.
Python
bsd-2-clause
akash1808/wrapt,github4ry/wrapt,wujuguang/wrapt,akash1808/wrapt,wujuguang/wrapt,pombredanne/wrapt,pombredanne/wrapt,GrahamDumpleton/wrapt,pombredanne/python-lazy-object-proxy,ionelmc/python-lazy-object-proxy,linglaiyao1314/wrapt,GrahamDumpleton/wrapt,linglaiyao1314/wrapt,pombredanne/python-lazy-object-proxy,ionelmc/python-lazy-object-proxy,github4ry/wrapt
python
## Code Before: from distutils.core import setup from distutils.core import Extension setup(name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) ## Instruction: Make compilation of extensions optional through an environment variable. ## Code After: import os from distutils.core import setup from distutils.core import Extension with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') with_extensions = (with_extensions.lower() != 'false') setup_kwargs = dict( name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ) setup_extension_kwargs = dict( ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) if with_extensions: setup_kwargs.update(setup_extension_kwargs) setup(**setup_kwargs)
... import os from distutils.core import setup from distutils.core import Extension with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') with_extensions = (with_extensions.lower() != 'false') setup_kwargs = dict( name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', ... url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ) setup_extension_kwargs = dict( ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) if with_extensions: setup_kwargs.update(setup_extension_kwargs) setup(**setup_kwargs) ...
dcd6d830033914a0ccf26822d6f305c084b90987
f8a_jobs/defaults.py
f8a_jobs/defaults.py
import os from datetime import timedelta _BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__)) DEFAULT_SERVICE_PORT = 34000 SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml') DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs') TOKEN_VALID_TIME = timedelta(days=14) AUTH_ORGANIZATION = 'fabric8-analytics' GITHUB_CONSUMER_KEY = os.getenv('GITHUB_CONSUMER_KEY', 'not-set') GITHUB_CONSUMER_SECRET = os.getenv('GITHUB_CONSUMER_SECRET', 'not-set') GITHUB_ACCESS_TOKENS = os.getenv('GITHUB_ACCESS_TOKENS', '').split(',') APP_SECRET_KEY = os.getenv('APP_SECRET_KEY', 'not-set') AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY') AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1') DEPLOYMENT_PREFIX = os.getenv('AWS_SQS_REGION', 'us-east-1') # keep disabled authentication by default DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true')
import os from datetime import timedelta _BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__)) DEFAULT_SERVICE_PORT = 34000 SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml') DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs') TOKEN_VALID_TIME = timedelta(days=14) AUTH_ORGANIZATION = 'fabric8-analytics' GITHUB_CONSUMER_KEY = os.getenv('GITHUB_CONSUMER_KEY', 'not-set') GITHUB_CONSUMER_SECRET = os.getenv('GITHUB_CONSUMER_SECRET', 'not-set') GITHUB_ACCESS_TOKENS = os.getenv('GITHUB_ACCESS_TOKENS', '').split(',') APP_SECRET_KEY = os.getenv('APP_SECRET_KEY', 'not-set') AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY') AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1') DEPLOYMENT_PREFIX = os.getenv('DEPLOYMENT_PREFIX', os.getenv('USER')) # keep disabled authentication by default DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true')
Fix wrong variable reference in configuration
Fix wrong variable reference in configuration
Python
apache-2.0
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
python
## Code Before: import os from datetime import timedelta _BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__)) DEFAULT_SERVICE_PORT = 34000 SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml') DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs') TOKEN_VALID_TIME = timedelta(days=14) AUTH_ORGANIZATION = 'fabric8-analytics' GITHUB_CONSUMER_KEY = os.getenv('GITHUB_CONSUMER_KEY', 'not-set') GITHUB_CONSUMER_SECRET = os.getenv('GITHUB_CONSUMER_SECRET', 'not-set') GITHUB_ACCESS_TOKENS = os.getenv('GITHUB_ACCESS_TOKENS', '').split(',') APP_SECRET_KEY = os.getenv('APP_SECRET_KEY', 'not-set') AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY') AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1') DEPLOYMENT_PREFIX = os.getenv('AWS_SQS_REGION', 'us-east-1') # keep disabled authentication by default DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true') ## Instruction: Fix wrong variable reference in configuration ## Code After: import os from datetime import timedelta _BAYESIAN_JOBS_DIR = os.path.dirname(os.path.realpath(__file__)) DEFAULT_SERVICE_PORT = 34000 SWAGGER_YAML_PATH = os.path.join(_BAYESIAN_JOBS_DIR, 'swagger.yaml') DEFAULT_JOB_DIR = os.path.join(_BAYESIAN_JOBS_DIR, 'default_jobs') TOKEN_VALID_TIME = timedelta(days=14) AUTH_ORGANIZATION = 'fabric8-analytics' GITHUB_CONSUMER_KEY = os.getenv('GITHUB_CONSUMER_KEY', 'not-set') GITHUB_CONSUMER_SECRET = os.getenv('GITHUB_CONSUMER_SECRET', 'not-set') GITHUB_ACCESS_TOKENS = os.getenv('GITHUB_ACCESS_TOKENS', '').split(',') APP_SECRET_KEY = os.getenv('APP_SECRET_KEY', 'not-set') AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY') AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1') DEPLOYMENT_PREFIX = os.getenv('DEPLOYMENT_PREFIX', os.getenv('USER')) # keep disabled authentication by default 
DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true')
# ... existing code ... AWS_ACCESS_KEY_ID = os.getenv('AWS_SQS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SQS_SECRET_ACCESS_KEY') AWS_SQS_REGION = os.getenv('AWS_SQS_REGION', 'us-east-1') DEPLOYMENT_PREFIX = os.getenv('DEPLOYMENT_PREFIX', os.getenv('USER')) # keep disabled authentication by default DISABLE_AUTHENTICATION = os.getenv('DISABLE_AUTHENTICATION', '1') in ('1', 'True', 'true') # ... rest of the code ...
803e128b8e151c061f75051b5a4386d4c624ba56
core/settings-wni-Windows_NT.py
core/settings-wni-Windows_NT.py
from __future__ import absolute_import from qubes.storage.wni import QubesWniVmStorage def apply(system_path, vm_files, defaults): system_path['qubes_base_dir'] = 'c:\\qubes' system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml' system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml' system_path['qubes_icon_dir'] = \ 'c:/program files/Invisible Things Lab/Qubes/icons' system_path['qubesdb_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe' system_path['qrexec_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe' # Specific to WNI - normally VM have this file system_path['qrexec_agent_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe' defaults['libvirt_uri'] = 'wni:///' defaults['storage_class'] = QubesWniVmStorage
from __future__ import absolute_import from qubes.storage.wni import QubesWniVmStorage def apply(system_path, vm_files, defaults): system_path['qubes_base_dir'] = 'c:\\qubes' system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml' system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml' system_path['qubes_icon_dir'] = \ 'c:/program files/Invisible Things Lab/Qubes/icons' system_path['qubesdb_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe' system_path['qrexec_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe' system_path['qrexec_client_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe' # Specific to WNI - normally VM have this file system_path['qrexec_agent_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe' defaults['libvirt_uri'] = 'wni:///' defaults['storage_class'] = QubesWniVmStorage
Add qrexec-client path to WNI settings
wni: Add qrexec-client path to WNI settings
Python
lgpl-2.1
marmarek/qubes-core-admin,QubesOS/qubes-core-admin,QubesOS/qubes-core-admin,woju/qubes-core-admin,marmarek/qubes-core-admin,QubesOS/qubes-core-admin,woju/qubes-core-admin,woju/qubes-core-admin,woju/qubes-core-admin,marmarek/qubes-core-admin
python
## Code Before: from __future__ import absolute_import from qubes.storage.wni import QubesWniVmStorage def apply(system_path, vm_files, defaults): system_path['qubes_base_dir'] = 'c:\\qubes' system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml' system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml' system_path['qubes_icon_dir'] = \ 'c:/program files/Invisible Things Lab/Qubes/icons' system_path['qubesdb_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe' system_path['qrexec_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe' # Specific to WNI - normally VM have this file system_path['qrexec_agent_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe' defaults['libvirt_uri'] = 'wni:///' defaults['storage_class'] = QubesWniVmStorage ## Instruction: wni: Add qrexec-client path to WNI settings ## Code After: from __future__ import absolute_import from qubes.storage.wni import QubesWniVmStorage def apply(system_path, vm_files, defaults): system_path['qubes_base_dir'] = 'c:\\qubes' system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml' system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml' system_path['qubes_icon_dir'] = \ 'c:/program files/Invisible Things Lab/Qubes/icons' system_path['qubesdb_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe' system_path['qrexec_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe' system_path['qrexec_client_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe' # Specific to WNI - normally VM have this file system_path['qrexec_agent_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe' defaults['libvirt_uri'] = 'wni:///' defaults['storage_class'] = 
QubesWniVmStorage
// ... existing code ... 'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe' system_path['qrexec_daemon_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe' system_path['qrexec_client_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe' # Specific to WNI - normally VM have this file system_path['qrexec_agent_path'] = \ 'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe' // ... rest of the code ...
5a8d7375b617bd5605bce5f09a4caedef170a85c
gbpservice/neutron/db/migration/cli.py
gbpservice/neutron/db/migration/cli.py
from neutron.db.migration.cli import * # noqa def main(): config = alembic_config.Config( os.path.join(os.path.dirname(__file__), 'alembic.ini')) config.set_main_option( 'script_location', 'gbpservice.neutron.db.migration:alembic_migrations') config.neutron_config = CONF CONF() CONF.command.func(config, CONF.command.name)
from neutron.db.migration.cli import * # noqa def main(): config = alembic_config.Config( os.path.join(os.path.dirname(__file__), 'alembic.ini')) config.set_main_option( 'script_location', 'gbpservice.neutron.db.migration:alembic_migrations') config.neutron_config = CONF CONF(project='neutron') CONF.command.func(config, CONF.command.name)
Set project when doing neutron DB migrations
Set project when doing neutron DB migrations That way, the default configuration files/dirs from the neutron projects are read when doing the DB migrations. This is useful if eg. some configuration files are in /etc/neutron/neutron.conf.d/ . Theses files will then be automatically evaluated. Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62
Python
apache-2.0
noironetworks/group-based-policy,stackforge/group-based-policy,stackforge/group-based-policy,noironetworks/group-based-policy
python
## Code Before: from neutron.db.migration.cli import * # noqa def main(): config = alembic_config.Config( os.path.join(os.path.dirname(__file__), 'alembic.ini')) config.set_main_option( 'script_location', 'gbpservice.neutron.db.migration:alembic_migrations') config.neutron_config = CONF CONF() CONF.command.func(config, CONF.command.name) ## Instruction: Set project when doing neutron DB migrations That way, the default configuration files/dirs from the neutron projects are read when doing the DB migrations. This is useful if eg. some configuration files are in /etc/neutron/neutron.conf.d/ . Theses files will then be automatically evaluated. Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62 ## Code After: from neutron.db.migration.cli import * # noqa def main(): config = alembic_config.Config( os.path.join(os.path.dirname(__file__), 'alembic.ini')) config.set_main_option( 'script_location', 'gbpservice.neutron.db.migration:alembic_migrations') config.neutron_config = CONF CONF(project='neutron') CONF.command.func(config, CONF.command.name)
# ... existing code ... 'script_location', 'gbpservice.neutron.db.migration:alembic_migrations') config.neutron_config = CONF CONF(project='neutron') CONF.command.func(config, CONF.command.name) # ... rest of the code ...
2d7974ac4895af5e7d2f5a627656bb3edbfa65a9
config/config.py
config/config.py
def playerIcons(poi): if poi['id'] == 'Player': poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId'] return "Last known location for %s" % poi['EntityId'] def signFilter(poi): if poi['id'] == 'Sign': return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']]) worlds['minecraft'] = "/home/minecraft/server/world" outputdir = "/home/minecraft/render/" markers = [ dict(name="Players", filterFunction=playerIcons), dict(name="Signs", filterFunction=signFilter) ] renders["day"] = { 'world': 'minecraft', 'title': 'Day', 'rendermode': 'smooth_lighting', 'markers': markers } renders["night"] = { 'world': 'minecraft', 'title': 'Night', 'rendermode': 'smooth_night', 'markers': markers }
def playerIcons(poi): if poi['id'] == 'Player': poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId'] return "Last known location for %s" % poi['EntityId'] # Only signs with "-- RENDER --" on the last line will be shown # Otherwise, people can't have secret bases and the render is too busy anyways. def signFilter(poi): if poi['id'] == 'Sign': if poi['Text4'] == '-- RENDER --': return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']]) worlds['minecraft'] = "/home/minecraft/server/world" outputdir = "/home/minecraft/render/" markers = [ dict(name="Players", filterFunction=playerIcons), dict(name="Signs", filterFunction=signFilter) ] renders["day"] = { 'world': 'minecraft', 'title': 'Day', 'rendermode': 'smooth_lighting', 'markers': markers } renders["night"] = { 'world': 'minecraft', 'title': 'Night', 'rendermode': 'smooth_night', 'markers': markers }
Add filter text to signs
Add filter text to signs
Python
mit
mide/minecraft-overviewer,StefanBossbaly/minecraft-overviewer,StefanBossbaly/minecraft-overviewer,mide/minecraft-overviewer
python
## Code Before: def playerIcons(poi): if poi['id'] == 'Player': poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId'] return "Last known location for %s" % poi['EntityId'] def signFilter(poi): if poi['id'] == 'Sign': return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']]) worlds['minecraft'] = "/home/minecraft/server/world" outputdir = "/home/minecraft/render/" markers = [ dict(name="Players", filterFunction=playerIcons), dict(name="Signs", filterFunction=signFilter) ] renders["day"] = { 'world': 'minecraft', 'title': 'Day', 'rendermode': 'smooth_lighting', 'markers': markers } renders["night"] = { 'world': 'minecraft', 'title': 'Night', 'rendermode': 'smooth_night', 'markers': markers } ## Instruction: Add filter text to signs ## Code After: def playerIcons(poi): if poi['id'] == 'Player': poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId'] return "Last known location for %s" % poi['EntityId'] # Only signs with "-- RENDER --" on the last line will be shown # Otherwise, people can't have secret bases and the render is too busy anyways. def signFilter(poi): if poi['id'] == 'Sign': if poi['Text4'] == '-- RENDER --': return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']]) worlds['minecraft'] = "/home/minecraft/server/world" outputdir = "/home/minecraft/render/" markers = [ dict(name="Players", filterFunction=playerIcons), dict(name="Signs", filterFunction=signFilter) ] renders["day"] = { 'world': 'minecraft', 'title': 'Day', 'rendermode': 'smooth_lighting', 'markers': markers } renders["night"] = { 'world': 'minecraft', 'title': 'Night', 'rendermode': 'smooth_night', 'markers': markers }
// ... existing code ... poi['icon'] = "http://overviewer.org/avatar/%s" % poi['EntityId'] return "Last known location for %s" % poi['EntityId'] # Only signs with "-- RENDER --" on the last line will be shown # Otherwise, people can't have secret bases and the render is too busy anyways. def signFilter(poi): if poi['id'] == 'Sign': if poi['Text4'] == '-- RENDER --': return "\n".join([poi['Text1'], poi['Text2'], poi['Text3'], poi['Text4']]) worlds['minecraft'] = "/home/minecraft/server/world" outputdir = "/home/minecraft/render/" // ... rest of the code ...
51701b35d9ef9401abf0d86fd5726e669326390d
scripts/nipy_4dto3D.py
scripts/nipy_4dto3D.py
''' Tiny script to write 4D files in any format that we read (nifti, analyze, MINC, at the moment, as nifti 3D files ''' import os import sys import nipy.io.imageformats as nii if __name__ == '__main__': try: fname = sys.argv[1] except IndexError: raise OSError('Expecting 4d image filename') img = nii.load(fname) imgs = nii.four_to_three(img) froot, ext = os.path.splitext(fname) if ext in ('.gz', '.bz2'): froot, ext = os.path.splitext(froot) for i, img3d in enumerate(imgs): fname3d = '%s_%04d.nii' % (froot, i) nii.save(img3d, fname3d)
''' Tiny script to write 4D files in any format that we read (nifti, analyze, MINC, at the moment, as nifti 3D files ''' import os import nipy.externals.argparse as argparse import nipy.io.imageformats as nii def main(): # create the parser parser = argparse.ArgumentParser() # add the arguments parser.add_argument('filename', type=str, help='4D image filename') # parse the command line args = parser.parse_args() img = nii.load(args.filename) imgs = nii.four_to_three(img) froot, ext = os.path.splitext(args.filename) if ext in ('.gz', '.bz2'): froot, ext = os.path.splitext(froot) for i, img3d in enumerate(imgs): fname3d = '%s_%04d.nii' % (froot, i) nii.save(img3d, fname3d) if __name__ == '__main__': main()
Use argparse for 4D to 3D
Use argparse for 4D to 3D
Python
bsd-3-clause
nipy/nipy-labs,arokem/nipy,bthirion/nipy,alexis-roche/register,arokem/nipy,alexis-roche/niseg,bthirion/nipy,alexis-roche/nipy,bthirion/nipy,nipy/nireg,alexis-roche/nireg,nipy/nipy-labs,alexis-roche/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/register,alexis-roche/nireg,nipy/nireg,alexis-roche/register,alexis-roche/niseg,alexis-roche/nipy,arokem/nipy,arokem/nipy
python
## Code Before: ''' Tiny script to write 4D files in any format that we read (nifti, analyze, MINC, at the moment, as nifti 3D files ''' import os import sys import nipy.io.imageformats as nii if __name__ == '__main__': try: fname = sys.argv[1] except IndexError: raise OSError('Expecting 4d image filename') img = nii.load(fname) imgs = nii.four_to_three(img) froot, ext = os.path.splitext(fname) if ext in ('.gz', '.bz2'): froot, ext = os.path.splitext(froot) for i, img3d in enumerate(imgs): fname3d = '%s_%04d.nii' % (froot, i) nii.save(img3d, fname3d) ## Instruction: Use argparse for 4D to 3D ## Code After: ''' Tiny script to write 4D files in any format that we read (nifti, analyze, MINC, at the moment, as nifti 3D files ''' import os import nipy.externals.argparse as argparse import nipy.io.imageformats as nii def main(): # create the parser parser = argparse.ArgumentParser() # add the arguments parser.add_argument('filename', type=str, help='4D image filename') # parse the command line args = parser.parse_args() img = nii.load(args.filename) imgs = nii.four_to_three(img) froot, ext = os.path.splitext(args.filename) if ext in ('.gz', '.bz2'): froot, ext = os.path.splitext(froot) for i, img3d in enumerate(imgs): fname3d = '%s_%04d.nii' % (froot, i) nii.save(img3d, fname3d) if __name__ == '__main__': main()
# ... existing code ... analyze, MINC, at the moment, as nifti 3D files ''' import os import nipy.externals.argparse as argparse import nipy.io.imageformats as nii def main(): # create the parser parser = argparse.ArgumentParser() # add the arguments parser.add_argument('filename', type=str, help='4D image filename') # parse the command line args = parser.parse_args() img = nii.load(args.filename) imgs = nii.four_to_three(img) froot, ext = os.path.splitext(args.filename) if ext in ('.gz', '.bz2'): froot, ext = os.path.splitext(froot) for i, img3d in enumerate(imgs): fname3d = '%s_%04d.nii' % (froot, i) nii.save(img3d, fname3d) if __name__ == '__main__': main() # ... rest of the code ...
eb6557e2342b3605b434ba151ba54bdb61f2d96b
cdm/src/test/java/thredds/catalog2/TestAll.java
cdm/src/test/java/thredds/catalog2/TestAll.java
package thredds.catalog2; import junit.framework.*; import thredds.catalog2.simpleImpl.*; import thredds.catalog2.xml.parser.TestCatalogParser; /** * _more_ * * @author edavis * @since 4.0 */ public class TestAll extends TestCase { public TestAll( String name ) { super( name ); } public static Test suite() { TestSuite suite = new TestSuite(); // Tests in thredds.catalog2.simpleImpl suite.addTestSuite( TestPropertyImpl.class ); suite.addTestSuite( TestPropertyContainer.class ); suite.addTestSuite( TestServiceImpl.class ); suite.addTestSuite( TestServiceContainer.class ); suite.addTestSuite( TestAccessImpl.class ); suite.addTestSuite( TestCatalogImpl.class ); // Tests in thredds.catalog2.xml suite.addTestSuite( TestCatalogParser.class ); return suite; } }
package thredds.catalog2; import junit.framework.*; import thredds.catalog2.simpleImpl.*; import thredds.catalog2.xml.parser.TestCatalogParser; /** * _more_ * * @author edavis * @since 4.0 */ public class TestAll extends TestCase { public TestAll( String name ) { super( name ); } public static Test suite() { TestSuite suite = new TestSuite(); // Tests in thredds.catalog2.simpleImpl suite.addTestSuite( TestPropertyImpl.class ); suite.addTestSuite( TestPropertyContainer.class ); suite.addTestSuite( TestServiceImpl.class ); suite.addTestSuite( TestServiceContainer.class ); suite.addTestSuite( TestDatasetNodeImpl.class ); suite.addTestSuite( TestAccessImpl.class ); //suite.addTestSuite( TestCatalogRefImpl.class ); suite.addTestSuite( TestCatalogImpl.class ); // Tests in thredds.catalog2.xml suite.addTestSuite( TestCatalogParser.class ); return suite; } }
Work on thredds.catalog2: working on unit tests.
Work on thredds.catalog2: working on unit tests.
Java
bsd-3-clause
Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java
java
## Code Before: package thredds.catalog2; import junit.framework.*; import thredds.catalog2.simpleImpl.*; import thredds.catalog2.xml.parser.TestCatalogParser; /** * _more_ * * @author edavis * @since 4.0 */ public class TestAll extends TestCase { public TestAll( String name ) { super( name ); } public static Test suite() { TestSuite suite = new TestSuite(); // Tests in thredds.catalog2.simpleImpl suite.addTestSuite( TestPropertyImpl.class ); suite.addTestSuite( TestPropertyContainer.class ); suite.addTestSuite( TestServiceImpl.class ); suite.addTestSuite( TestServiceContainer.class ); suite.addTestSuite( TestAccessImpl.class ); suite.addTestSuite( TestCatalogImpl.class ); // Tests in thredds.catalog2.xml suite.addTestSuite( TestCatalogParser.class ); return suite; } } ## Instruction: Work on thredds.catalog2: working on unit tests. ## Code After: package thredds.catalog2; import junit.framework.*; import thredds.catalog2.simpleImpl.*; import thredds.catalog2.xml.parser.TestCatalogParser; /** * _more_ * * @author edavis * @since 4.0 */ public class TestAll extends TestCase { public TestAll( String name ) { super( name ); } public static Test suite() { TestSuite suite = new TestSuite(); // Tests in thredds.catalog2.simpleImpl suite.addTestSuite( TestPropertyImpl.class ); suite.addTestSuite( TestPropertyContainer.class ); suite.addTestSuite( TestServiceImpl.class ); suite.addTestSuite( TestServiceContainer.class ); suite.addTestSuite( TestDatasetNodeImpl.class ); suite.addTestSuite( TestAccessImpl.class ); //suite.addTestSuite( TestCatalogRefImpl.class ); suite.addTestSuite( TestCatalogImpl.class ); // Tests in thredds.catalog2.xml suite.addTestSuite( TestCatalogParser.class ); return suite; } }
# ... existing code ... suite.addTestSuite( TestPropertyContainer.class ); suite.addTestSuite( TestServiceImpl.class ); suite.addTestSuite( TestServiceContainer.class ); suite.addTestSuite( TestDatasetNodeImpl.class ); suite.addTestSuite( TestAccessImpl.class ); //suite.addTestSuite( TestCatalogRefImpl.class ); suite.addTestSuite( TestCatalogImpl.class ); // Tests in thredds.catalog2.xml # ... rest of the code ...
6d8b6cfe9e2de860b4b39a1e0f0bb8fa45e6b96f
manage.py
manage.py
from flask.ext.script import Manager, prompt, prompt_bool, prompt_pass from db_create import ( init_db, drop_db, init_admin_user, init_entry, init_category, init_tag ) from flask.ext.migrate import MigrateCommand from logpot.app import app import os if os.path.exists('.env'): print('Importing environment from .env...') for line in open('.env'): var = line.strip().split('=') if len(var) == 2: os.environ[var[0]] = var[1] manager = Manager(app) manager.add_command('db', MigrateCommand) @manager.command def run(): app.run(threaded=True) @manager.command def initialize(): if prompt_bool("Are you sure you want to create DB and initialize?"): drop_db() init_db() if init_admin(): init_category() init_tag() init_entry() print('Success!') @manager.command def init_admin(): name = prompt('Resister admin user.\n[?] input username: ') email = prompt('[?] input email: ') password = prompt_pass('[?] input password: ') confirm_password = prompt_pass('[?] input password again: ') if not password == confirm_password: print('Password does not match.') return False else: init_admin_user(name, email, password) return True if __name__ == "__main__": manager.run()
import os if os.path.exists('.env'): print('Importing environment from .env...') for line in open('.env'): var = line.strip().split('=') if len(var) == 2: os.environ[var[0]] = var[1] from flask.ext.script import Manager, prompt, prompt_bool, prompt_pass from db_create import ( init_db, drop_db, init_admin_user, init_entry, init_category, init_tag ) from flask.ext.migrate import MigrateCommand from logpot.app import app manager = Manager(app) manager.add_command('db', MigrateCommand) @manager.command def run(): app.run(threaded=True) @manager.command def initialize(): if prompt_bool("Are you sure you want to create DB and initialize?"): drop_db() init_db() if init_admin(): init_category() init_tag() init_entry() print('Success!') @manager.command def init_admin(): name = prompt('Resister admin user.\n[?] input username: ') email = prompt('[?] input email: ') password = prompt_pass('[?] input password: ') confirm_password = prompt_pass('[?] input password again: ') if not password == confirm_password: print('Password does not match.') return False else: init_admin_user(name, email, password) return True if __name__ == "__main__": manager.run()
Fix import location of environment variables
Fix import location of environment variables
Python
mit
moremorefor/Logpot,moremorefor/Logpot,moremorefor/Logpot
python
## Code Before: from flask.ext.script import Manager, prompt, prompt_bool, prompt_pass from db_create import ( init_db, drop_db, init_admin_user, init_entry, init_category, init_tag ) from flask.ext.migrate import MigrateCommand from logpot.app import app import os if os.path.exists('.env'): print('Importing environment from .env...') for line in open('.env'): var = line.strip().split('=') if len(var) == 2: os.environ[var[0]] = var[1] manager = Manager(app) manager.add_command('db', MigrateCommand) @manager.command def run(): app.run(threaded=True) @manager.command def initialize(): if prompt_bool("Are you sure you want to create DB and initialize?"): drop_db() init_db() if init_admin(): init_category() init_tag() init_entry() print('Success!') @manager.command def init_admin(): name = prompt('Resister admin user.\n[?] input username: ') email = prompt('[?] input email: ') password = prompt_pass('[?] input password: ') confirm_password = prompt_pass('[?] input password again: ') if not password == confirm_password: print('Password does not match.') return False else: init_admin_user(name, email, password) return True if __name__ == "__main__": manager.run() ## Instruction: Fix import location of environment variables ## Code After: import os if os.path.exists('.env'): print('Importing environment from .env...') for line in open('.env'): var = line.strip().split('=') if len(var) == 2: os.environ[var[0]] = var[1] from flask.ext.script import Manager, prompt, prompt_bool, prompt_pass from db_create import ( init_db, drop_db, init_admin_user, init_entry, init_category, init_tag ) from flask.ext.migrate import MigrateCommand from logpot.app import app manager = Manager(app) manager.add_command('db', MigrateCommand) @manager.command def run(): app.run(threaded=True) @manager.command def initialize(): if prompt_bool("Are you sure you want to create DB and initialize?"): drop_db() init_db() if init_admin(): init_category() init_tag() init_entry() print('Success!') 
@manager.command def init_admin(): name = prompt('Resister admin user.\n[?] input username: ') email = prompt('[?] input email: ') password = prompt_pass('[?] input password: ') confirm_password = prompt_pass('[?] input password again: ') if not password == confirm_password: print('Password does not match.') return False else: init_admin_user(name, email, password) return True if __name__ == "__main__": manager.run()
// ... existing code ... import os if os.path.exists('.env'): print('Importing environment from .env...') for line in open('.env'): var = line.strip().split('=') if len(var) == 2: os.environ[var[0]] = var[1] from flask.ext.script import Manager, prompt, prompt_bool, prompt_pass from db_create import ( // ... modified code ... ) from flask.ext.migrate import MigrateCommand from logpot.app import app manager = Manager(app) manager.add_command('db', MigrateCommand) // ... rest of the code ...
877930fd440eb6d1d280d2a3dce8893eb99020df
src/designer/com/opensymphony/workflow/designer/editor/DetailPanel.java
src/designer/com/opensymphony/workflow/designer/editor/DetailPanel.java
package com.opensymphony.workflow.designer.editor; import javax.swing.*; import com.opensymphony.workflow.designer.WorkflowCell; import com.opensymphony.workflow.designer.WorkflowEdge; /** * @author Hani Suleiman ([email protected]) * Date: May 20, 2003 * Time: 10:27:26 AM */ public abstract class DetailPanel extends JPanel { private WorkflowCell cell; private WorkflowEdge edge; private boolean componentsInited = false; public WorkflowCell getCell() { return cell; } public WorkflowEdge getEdge() { return edge; } protected void viewClosed() { } public final void closeView() { viewClosed(); } public final void setCell(WorkflowCell cell) { if(!componentsInited) initComponents(); this.cell = cell; setName(cell.getClass().getName()); updateView(); } protected abstract void initComponents(); protected abstract void updateView(); public String getTitle() { return "Details"; } public void setEdge(WorkflowEdge edge) { if(!componentsInited) initComponents(); this.edge = edge; setName(edge.getClass().getName()); updateView(); } }
package com.opensymphony.workflow.designer.editor; import javax.swing.*; import com.opensymphony.workflow.designer.WorkflowCell; import com.opensymphony.workflow.designer.WorkflowEdge; /** * @author Hani Suleiman ([email protected]) * Date: May 20, 2003 * Time: 10:27:26 AM */ public abstract class DetailPanel extends JPanel { private WorkflowCell cell; private WorkflowEdge edge; private boolean componentsInited = false; public WorkflowCell getCell() { return cell; } public WorkflowEdge getEdge() { return edge; } protected void viewClosed() { } public final void closeView() { viewClosed(); } public final void setCell(WorkflowCell cell) { if(!componentsInited) { initComponents(); componentsInited = true; } this.cell = cell; setName(cell.getClass().getName()); updateView(); } protected abstract void initComponents(); protected abstract void updateView(); public String getTitle() { return "Details"; } public void setEdge(WorkflowEdge edge) { if(!componentsInited) { initComponents(); componentsInited = true; } componentsInited = true; this.edge = edge; setName(edge.getClass().getName()); updateView(); } }
Set componentsInited once they get inited
Set componentsInited once they get inited
Java
mit
tedliang/osworkflow,tedliang/osworkflow,tedliang/osworkflow,tedliang/osworkflow
java
## Code Before: package com.opensymphony.workflow.designer.editor; import javax.swing.*; import com.opensymphony.workflow.designer.WorkflowCell; import com.opensymphony.workflow.designer.WorkflowEdge; /** * @author Hani Suleiman ([email protected]) * Date: May 20, 2003 * Time: 10:27:26 AM */ public abstract class DetailPanel extends JPanel { private WorkflowCell cell; private WorkflowEdge edge; private boolean componentsInited = false; public WorkflowCell getCell() { return cell; } public WorkflowEdge getEdge() { return edge; } protected void viewClosed() { } public final void closeView() { viewClosed(); } public final void setCell(WorkflowCell cell) { if(!componentsInited) initComponents(); this.cell = cell; setName(cell.getClass().getName()); updateView(); } protected abstract void initComponents(); protected abstract void updateView(); public String getTitle() { return "Details"; } public void setEdge(WorkflowEdge edge) { if(!componentsInited) initComponents(); this.edge = edge; setName(edge.getClass().getName()); updateView(); } } ## Instruction: Set componentsInited once they get inited ## Code After: package com.opensymphony.workflow.designer.editor; import javax.swing.*; import com.opensymphony.workflow.designer.WorkflowCell; import com.opensymphony.workflow.designer.WorkflowEdge; /** * @author Hani Suleiman ([email protected]) * Date: May 20, 2003 * Time: 10:27:26 AM */ public abstract class DetailPanel extends JPanel { private WorkflowCell cell; private WorkflowEdge edge; private boolean componentsInited = false; public WorkflowCell getCell() { return cell; } public WorkflowEdge getEdge() { return edge; } protected void viewClosed() { } public final void closeView() { viewClosed(); } public final void setCell(WorkflowCell cell) { if(!componentsInited) { initComponents(); componentsInited = true; } this.cell = cell; setName(cell.getClass().getName()); updateView(); } protected abstract void initComponents(); protected abstract void updateView(); public 
String getTitle() { return "Details"; } public void setEdge(WorkflowEdge edge) { if(!componentsInited) { initComponents(); componentsInited = true; } componentsInited = true; this.edge = edge; setName(edge.getClass().getName()); updateView(); } }
// ... existing code ... } protected void viewClosed() { } public final void closeView() // ... modified code ... public final void setCell(WorkflowCell cell) { if(!componentsInited) { initComponents(); componentsInited = true; } this.cell = cell; setName(cell.getClass().getName()); updateView(); ... public void setEdge(WorkflowEdge edge) { if(!componentsInited) { initComponents(); componentsInited = true; } componentsInited = true; this.edge = edge; setName(edge.getClass().getName()); updateView(); // ... rest of the code ...
d0fbd28a7230f6500a19f5cf6216f18eef07d1d9
src/scenarios/base.h
src/scenarios/base.h
namespace scenarios { class base : public boost::noncopyable { public: virtual ~base() {}; virtual agents apply(const agents& source) const = 0; }; template<typename DERIVED> class scenario : public base { public: scenario(const boost::property_tree::ptree& config); virtual ~scenario() override; protected: private: static factory<base, boost::property_tree::ptree>::registration<DERIVED> s_factory; }; ////////////////////////////////// template<typename DERIVED> factory<base, boost::property_tree::ptree>::registration<DERIVED> scenario<DERIVED>::s_factory; template<typename DERIVED> scenario<DERIVED>::scenario(const boost::property_tree::ptree& config) { } template<typename DERIVED> scenario<DERIVED>::~scenario() { // a dummy statement to make sure the factory doesn't get optimized away by GCC boost::lexical_cast<std::string>(&s_factory); } }
namespace scenarios { class base : public boost::noncopyable { public: base(const boost::property_tree::ptree& config) {} virtual ~base() {}; virtual agents apply(const agents& source) const = 0; }; template<typename DERIVED, typename BASE = base> class scenario : public BASE { public: scenario(const boost::property_tree::ptree& config); virtual ~scenario() override; protected: private: static factory<base, boost::property_tree::ptree>::registration<DERIVED> s_factory; }; ////////////////////////////////// template<typename DERIVED, typename BASE> factory<base, boost::property_tree::ptree>::registration<DERIVED> scenario<DERIVED, BASE>::s_factory; template<typename DERIVED, typename BASE> scenario<DERIVED, BASE>::scenario(const boost::property_tree::ptree& config) : BASE(config) { } template<typename DERIVED, typename BASE> scenario<DERIVED, BASE>::~scenario() { // a dummy statement to make sure the factory doesn't get optimized away by GCC boost::lexical_cast<std::string>(&s_factory); } }
Allow scenarios inheritance for scenarios::scenario template
Allow scenarios inheritance for scenarios::scenario template
C
mit
martin-pr/group_motion_editing,martin-pr/group_motion_editing
c
## Code Before: namespace scenarios { class base : public boost::noncopyable { public: virtual ~base() {}; virtual agents apply(const agents& source) const = 0; }; template<typename DERIVED> class scenario : public base { public: scenario(const boost::property_tree::ptree& config); virtual ~scenario() override; protected: private: static factory<base, boost::property_tree::ptree>::registration<DERIVED> s_factory; }; ////////////////////////////////// template<typename DERIVED> factory<base, boost::property_tree::ptree>::registration<DERIVED> scenario<DERIVED>::s_factory; template<typename DERIVED> scenario<DERIVED>::scenario(const boost::property_tree::ptree& config) { } template<typename DERIVED> scenario<DERIVED>::~scenario() { // a dummy statement to make sure the factory doesn't get optimized away by GCC boost::lexical_cast<std::string>(&s_factory); } } ## Instruction: Allow scenarios inheritance for scenarios::scenario template ## Code After: namespace scenarios { class base : public boost::noncopyable { public: base(const boost::property_tree::ptree& config) {} virtual ~base() {}; virtual agents apply(const agents& source) const = 0; }; template<typename DERIVED, typename BASE = base> class scenario : public BASE { public: scenario(const boost::property_tree::ptree& config); virtual ~scenario() override; protected: private: static factory<base, boost::property_tree::ptree>::registration<DERIVED> s_factory; }; ////////////////////////////////// template<typename DERIVED, typename BASE> factory<base, boost::property_tree::ptree>::registration<DERIVED> scenario<DERIVED, BASE>::s_factory; template<typename DERIVED, typename BASE> scenario<DERIVED, BASE>::scenario(const boost::property_tree::ptree& config) : BASE(config) { } template<typename DERIVED, typename BASE> scenario<DERIVED, BASE>::~scenario() { // a dummy statement to make sure the factory doesn't get optimized away by GCC boost::lexical_cast<std::string>(&s_factory); } }
... class base : public boost::noncopyable { public: base(const boost::property_tree::ptree& config) {} virtual ~base() {}; virtual agents apply(const agents& source) const = 0; }; template<typename DERIVED, typename BASE = base> class scenario : public BASE { public: scenario(const boost::property_tree::ptree& config); virtual ~scenario() override; ... ////////////////////////////////// template<typename DERIVED, typename BASE> factory<base, boost::property_tree::ptree>::registration<DERIVED> scenario<DERIVED, BASE>::s_factory; template<typename DERIVED, typename BASE> scenario<DERIVED, BASE>::scenario(const boost::property_tree::ptree& config) : BASE(config) { } template<typename DERIVED, typename BASE> scenario<DERIVED, BASE>::~scenario() { // a dummy statement to make sure the factory doesn't get optimized away by GCC boost::lexical_cast<std::string>(&s_factory); } ...
06c3e03db75617b824eae088053a9fc563b936a7
virtool/user_permissions.py
virtool/user_permissions.py
PERMISSIONS = [ "cancel_job", "create_sample", "create_subtraction", "manage_users", "modify_hmm", "modify_options", "modify_virus", "rebuild_index", "remove_job", "remove_virus" ]
PERMISSIONS = [ "cancel_job", "create_sample", "manage_users", "modify_hmm", "modify_options", "modify_subtraction", "modify_virus", "rebuild_index", "remove_job", "remove_virus" ]
Change create_subtraction permission to modify_subtraction
Change create_subtraction permission to modify_subtraction
Python
mit
virtool/virtool,virtool/virtool,igboyes/virtool,igboyes/virtool
python
## Code Before: PERMISSIONS = [ "cancel_job", "create_sample", "create_subtraction", "manage_users", "modify_hmm", "modify_options", "modify_virus", "rebuild_index", "remove_job", "remove_virus" ] ## Instruction: Change create_subtraction permission to modify_subtraction ## Code After: PERMISSIONS = [ "cancel_job", "create_sample", "manage_users", "modify_hmm", "modify_options", "modify_subtraction", "modify_virus", "rebuild_index", "remove_job", "remove_virus" ]
... PERMISSIONS = [ "cancel_job", "create_sample", "manage_users", "modify_hmm", "modify_options", "modify_subtraction", "modify_virus", "rebuild_index", "remove_job", ...
75ff727cd29ae1b379c551f46217fa75bf0fb2bc
videoeditor.py
videoeditor.py
from moviepy.editor import * def bake_annotations(video_file, end_point, annotations): clip = VideoFileClip(video_file) composite_clips = [clip] #for annotation in annotations: # txt_clip = TextClip(annotation["text"], color="white", fontsize=70) # txt_clip = txt_clip.set_position(("center", "bottom")) # txt_clip = txt_clip.set_duration(0.5) # txt_clip = txt_clip.set_start(float(annotation["time"]) / 1000.0) # composite_clips.append(txt_clip) #final_video = CompositeVideoClip(composite_clips) final_video = generate_pauses(clip, annotations) final_video.write_videofile("video-out/" + end_point, audio=False) def generate_pauses(video_clip, annotations): """Takes in a regular video clip, and bakes in annotation pauses""" pause_time = 1 for annotation in reversed(annotations): current_annotation_time = annotation["time"] / 1000.0 video_clip = video_clip.fx(vfx.freeze, t=current_annotation_time, freeze_duration=pause_time) return video_clip
from moviepy.editor import * def bake_annotations(video_file, end_point, annotations): clip = VideoFileClip(video_file) composite_clips = [clip] #for annotation in annotations: # txt_clip = TextClip(annotation["text"], color="white", fontsize=70) # txt_clip = txt_clip.set_position(("center", "bottom")) # txt_clip = txt_clip.set_duration(0.5) # txt_clip = txt_clip.set_start(float(annotation["time"]) / 1000.0) # composite_clips.append(txt_clip) #final_video = CompositeVideoClip(composite_clips) final_video = generate_pauses(clip, annotations) final_video.write_videofile("video-out/" + end_point) def generate_pauses(video_clip, annotations): """Takes in a regular video clip, and bakes in annotation pauses""" for annotation in reversed(annotations): pause_time = len(annotation["text"]) * 0.4 current_annotation_time = annotation["time"] / 1000.0 video_clip = video_clip.fx(vfx.freeze, t=current_annotation_time, freeze_duration=pause_time) return video_clip
Make pause dependant on annotation text length
Make pause dependant on annotation text length
Python
mit
melonmanchan/achso-video-exporter,melonmanchan/achso-video-exporter
python
## Code Before: from moviepy.editor import * def bake_annotations(video_file, end_point, annotations): clip = VideoFileClip(video_file) composite_clips = [clip] #for annotation in annotations: # txt_clip = TextClip(annotation["text"], color="white", fontsize=70) # txt_clip = txt_clip.set_position(("center", "bottom")) # txt_clip = txt_clip.set_duration(0.5) # txt_clip = txt_clip.set_start(float(annotation["time"]) / 1000.0) # composite_clips.append(txt_clip) #final_video = CompositeVideoClip(composite_clips) final_video = generate_pauses(clip, annotations) final_video.write_videofile("video-out/" + end_point, audio=False) def generate_pauses(video_clip, annotations): """Takes in a regular video clip, and bakes in annotation pauses""" pause_time = 1 for annotation in reversed(annotations): current_annotation_time = annotation["time"] / 1000.0 video_clip = video_clip.fx(vfx.freeze, t=current_annotation_time, freeze_duration=pause_time) return video_clip ## Instruction: Make pause dependant on annotation text length ## Code After: from moviepy.editor import * def bake_annotations(video_file, end_point, annotations): clip = VideoFileClip(video_file) composite_clips = [clip] #for annotation in annotations: # txt_clip = TextClip(annotation["text"], color="white", fontsize=70) # txt_clip = txt_clip.set_position(("center", "bottom")) # txt_clip = txt_clip.set_duration(0.5) # txt_clip = txt_clip.set_start(float(annotation["time"]) / 1000.0) # composite_clips.append(txt_clip) #final_video = CompositeVideoClip(composite_clips) final_video = generate_pauses(clip, annotations) final_video.write_videofile("video-out/" + end_point) def generate_pauses(video_clip, annotations): """Takes in a regular video clip, and bakes in annotation pauses""" for annotation in reversed(annotations): pause_time = len(annotation["text"]) * 0.4 current_annotation_time = annotation["time"] / 1000.0 video_clip = video_clip.fx(vfx.freeze, t=current_annotation_time, freeze_duration=pause_time) return 
video_clip
// ... existing code ... #final_video = CompositeVideoClip(composite_clips) final_video = generate_pauses(clip, annotations) final_video.write_videofile("video-out/" + end_point) def generate_pauses(video_clip, annotations): """Takes in a regular video clip, and bakes in annotation pauses""" for annotation in reversed(annotations): pause_time = len(annotation["text"]) * 0.4 current_annotation_time = annotation["time"] / 1000.0 video_clip = video_clip.fx(vfx.freeze, t=current_annotation_time, freeze_duration=pause_time) // ... rest of the code ...
d2e78f98043aba4bfcae9957703b3fac044dade9
core/api/src/main/java/org/onosproject/store/Timestamp.java
core/api/src/main/java/org/onosproject/store/Timestamp.java
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store; import static com.google.common.base.Preconditions.checkNotNull; /** * Opaque version structure. * <p> * Classes implementing this interface must also implement * {@link #hashCode()} and {@link #equals(Object)}. */ public interface Timestamp extends Comparable<Timestamp> { @Override public abstract int hashCode(); @Override public abstract boolean equals(Object obj); /** * Tests if this timestamp is newer than the specified timestamp. * * @param other timestamp to compare against * @return true if this instance is newer */ public default boolean isNewerThan(Timestamp other) { return this.compareTo(checkNotNull(other)) > 0; } }
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store; import static com.google.common.base.Preconditions.checkNotNull; /** * Opaque version structure. * <p> * Classes implementing this interface must also implement * {@link #hashCode()} and {@link #equals(Object)}. */ public interface Timestamp extends Comparable<Timestamp> { @Override int hashCode(); @Override boolean equals(Object obj); /** * Tests if this timestamp is newer than the specified timestamp. * * @param other timestamp to compare against * @return true if this instance is newer */ default boolean isNewerThan(Timestamp other) { return this.compareTo(checkNotNull(other)) > 0; } }
Remove unnecessary modifiers to follow the convention
Remove unnecessary modifiers to follow the convention Change-Id: Ie8ff539252df6ed9df5ff827d639166a78fbf18d
Java
apache-2.0
rvhub/onos,oplinkoms/onos,y-higuchi/onos,rvhub/onos,oplinkoms/onos,donNewtonAlpha/onos,VinodKumarS-Huawei/ietf96yang,mengmoya/onos,CNlukai/onos-gerrit-test,gkatsikas/onos,osinstom/onos,oeeagle/onos,zsh2938/onos,planoAccess/clonedONOS,VinodKumarS-Huawei/ietf96yang,maheshraju-Huawei/actn,y-higuchi/onos,CNlukai/onos-gerrit-test,maheshraju-Huawei/actn,jinlongliu/onos,y-higuchi/onos,chenxiuyang/onos,osinstom/onos,sdnwiselab/onos,osinstom/onos,chinghanyu/onos,opennetworkinglab/onos,donNewtonAlpha/onos,LorenzReinhart/ONOSnew,rvhub/onos,rvhub/onos,jinlongliu/onos,LorenzReinhart/ONOSnew,planoAccess/clonedONOS,mengmoya/onos,planoAccess/clonedONOS,jmiserez/onos,Shashikanth-Huawei/bmp,kuujo/onos,VinodKumarS-Huawei/ietf96yang,jmiserez/onos,opennetworkinglab/onos,castroflavio/onos,sdnwiselab/onos,gkatsikas/onos,oplinkoms/onos,chenxiuyang/onos,sdnwiselab/onos,kuujo/onos,packet-tracker/onos,castroflavio/onos,osinstom/onos,jinlongliu/onos,sdnwiselab/onos,LorenzReinhart/ONOSnew,lsinfo3/onos,kuujo/onos,jmiserez/onos,kkkane/ONOS,lsinfo3/onos,kkkane/ONOS,oplinkoms/onos,sonu283304/onos,kuujo/onos,osinstom/onos,maheshraju-Huawei/actn,LorenzReinhart/ONOSnew,Shashikanth-Huawei/bmp,kkkane/ONOS,VinodKumarS-Huawei/ietf96yang,oplinkoms/onos,oeeagle/onos,Shashikanth-Huawei/bmp,Shashikanth-Huawei/bmp,kkkane/ONOS,packet-tracker/onos,gkatsikas/onos,donNewtonAlpha/onos,oeeagle/onos,packet-tracker/onos,castroflavio/onos,zsh2938/onos,opennetworkinglab/onos,gkatsikas/onos,maheshraju-Huawei/actn,VinodKumarS-Huawei/ietf96yang,opennetworkinglab/onos,Shashikanth-Huawei/bmp,y-higuchi/onos,lsinfo3/onos,sdnwiselab/onos,oplinkoms/onos,zsh2938/onos,donNewtonAlpha/onos,sonu283304/onos,gkatsikas/onos,mengmoya/onos,kuujo/onos,lsinfo3/onos,zsh2938/onos,maheshraju-Huawei/actn,planoAccess/clonedONOS,gkatsikas/onos,LorenzReinhart/ONOSnew,CNlukai/onos-gerrit-test,sonu283304/onos,chenxiuyang/onos,mengmoya/onos,CNlukai/onos-gerrit-test,chinghanyu/onos,chinghanyu/onos,kuujo/onos,sonu283304/onos,jmiserez/onos,donNewtonAlph
a/onos,opennetworkinglab/onos,chinghanyu/onos,packet-tracker/onos,oplinkoms/onos,sdnwiselab/onos,oeeagle/onos,chenxiuyang/onos,y-higuchi/onos,jinlongliu/onos,kuujo/onos,castroflavio/onos,mengmoya/onos,opennetworkinglab/onos
java
## Code Before: /* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store; import static com.google.common.base.Preconditions.checkNotNull; /** * Opaque version structure. * <p> * Classes implementing this interface must also implement * {@link #hashCode()} and {@link #equals(Object)}. */ public interface Timestamp extends Comparable<Timestamp> { @Override public abstract int hashCode(); @Override public abstract boolean equals(Object obj); /** * Tests if this timestamp is newer than the specified timestamp. * * @param other timestamp to compare against * @return true if this instance is newer */ public default boolean isNewerThan(Timestamp other) { return this.compareTo(checkNotNull(other)) > 0; } } ## Instruction: Remove unnecessary modifiers to follow the convention Change-Id: Ie8ff539252df6ed9df5ff827d639166a78fbf18d ## Code After: /* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store; import static com.google.common.base.Preconditions.checkNotNull; /** * Opaque version structure. * <p> * Classes implementing this interface must also implement * {@link #hashCode()} and {@link #equals(Object)}. */ public interface Timestamp extends Comparable<Timestamp> { @Override int hashCode(); @Override boolean equals(Object obj); /** * Tests if this timestamp is newer than the specified timestamp. * * @param other timestamp to compare against * @return true if this instance is newer */ default boolean isNewerThan(Timestamp other) { return this.compareTo(checkNotNull(other)) > 0; } }
... public interface Timestamp extends Comparable<Timestamp> { @Override int hashCode(); @Override boolean equals(Object obj); /** * Tests if this timestamp is newer than the specified timestamp. ... * @param other timestamp to compare against * @return true if this instance is newer */ default boolean isNewerThan(Timestamp other) { return this.compareTo(checkNotNull(other)) > 0; } } ...
c99ea848a39d22cb4347606b6cba97b98ce627fd
timesketch/api/v1/resources/information.py
timesketch/api/v1/resources/information.py
"""Information API for version 1 of the Timesketch API.""" from flask import jsonify from flask_restful import Resource from flask_login import login_required from timesketch import version from timesketch.api.v1 import resources from timesketch.lib.definitions import HTTP_STATUS_CODE_OK class VersionResource(resources.ResourceMixin, Resource): """Resource to get Timesketch API version information.""" @login_required def get(self): """Handles GET request to the resource. Returns: List of usernames """ schema = { 'meta': { 'version': version.get_version() }, 'objects': [] } response = jsonify(schema) response.status_code = HTTP_STATUS_CODE_OK return response
"""Information API for version 1 of the Timesketch API.""" from flask import jsonify from flask_restful import Resource from flask_login import login_required from timesketch import version from timesketch.api.v1 import resources from timesketch.lib.definitions import HTTP_STATUS_CODE_OK class VersionResource(resources.ResourceMixin, Resource): """Resource to get Timesketch API version information.""" @login_required def get(self): """Handles GET request to the resource. Returns: JSON object including version info """ schema = { 'meta': { 'version': version.get_version() }, 'objects': [] } response = jsonify(schema) response.status_code = HTTP_STATUS_CODE_OK return response
Fix method docstring (copy paste error)
Fix method docstring (copy paste error) Guess there was a copy paste error in this PR: https://github.com/google/timesketch/commit/64157452b7b8285ea928e4949434d46592791d47 As the method does not return user info.
Python
apache-2.0
google/timesketch,google/timesketch,google/timesketch,google/timesketch
python
## Code Before: """Information API for version 1 of the Timesketch API.""" from flask import jsonify from flask_restful import Resource from flask_login import login_required from timesketch import version from timesketch.api.v1 import resources from timesketch.lib.definitions import HTTP_STATUS_CODE_OK class VersionResource(resources.ResourceMixin, Resource): """Resource to get Timesketch API version information.""" @login_required def get(self): """Handles GET request to the resource. Returns: List of usernames """ schema = { 'meta': { 'version': version.get_version() }, 'objects': [] } response = jsonify(schema) response.status_code = HTTP_STATUS_CODE_OK return response ## Instruction: Fix method docstring (copy paste error) Guess there was a copy paste error in this PR: https://github.com/google/timesketch/commit/64157452b7b8285ea928e4949434d46592791d47 As the method does not return user info. ## Code After: """Information API for version 1 of the Timesketch API.""" from flask import jsonify from flask_restful import Resource from flask_login import login_required from timesketch import version from timesketch.api.v1 import resources from timesketch.lib.definitions import HTTP_STATUS_CODE_OK class VersionResource(resources.ResourceMixin, Resource): """Resource to get Timesketch API version information.""" @login_required def get(self): """Handles GET request to the resource. Returns: JSON object including version info """ schema = { 'meta': { 'version': version.get_version() }, 'objects': [] } response = jsonify(schema) response.status_code = HTTP_STATUS_CODE_OK return response
... """Handles GET request to the resource. Returns: JSON object including version info """ schema = { 'meta': { ...
1fde695a1625d503e54d90719c87460cf6f5a4c1
addressbook-web-tests/src/test/java/kurs/zadania/adressbook/appmanager/NavigationHelper.java
addressbook-web-tests/src/test/java/kurs/zadania/adressbook/appmanager/NavigationHelper.java
package kurs.zadania.adressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; public class NavigationHelper extends HelperBase{ public NavigationHelper(WebDriver wd) { super(wd); } // public void gotoGroupPage() { click(By.linkText("groups")); } /* sprawdzanie elementu na stronie i przejście po linku - wariant 1 public void gotoGroupPage() { if (!isElementPresent(By.tagName("h1")) || !wd.findElement(By.tagName("h1")).getText().equals("Groups") || !isElementPresent(By.name("new"))) { click(By.linkText("groups")); } } */ // sprawdzanie elementu na stronie i przejście po linku - wariant 2 lepszy public void gotoGroupPage() { if (isElementPresent(By.tagName("h1")) && !wd.findElement(By.tagName("h1")).getText().equals("Groups") && !isElementPresent(By.name("new"))) { return; } click(By.linkText("groups")); } public void gotoMainPage() { if (isElementPresent(By.id("maintable"))) { return; } click(By.linkText("home")); } }
package kurs.zadania.adressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; public class NavigationHelper extends HelperBase{ public NavigationHelper(WebDriver wd) { super(wd); } public void gotoGroupPage() { click(By.linkText("groups")); } public void gotoMainPage() { click(By.linkText("home")); } }
Revert "Revert "Revert "optimization of the transition between the pages"""
Revert "Revert "Revert "optimization of the transition between the pages""" This reverts commit 4ad3b97fcc8ee3476a910d0a891eb66b069f2843.
Java
apache-2.0
wozniakpiotr/java_training,wozniakpiotr/java_training,wozniakpiotr/java_training
java
## Code Before: package kurs.zadania.adressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; public class NavigationHelper extends HelperBase{ public NavigationHelper(WebDriver wd) { super(wd); } // public void gotoGroupPage() { click(By.linkText("groups")); } /* sprawdzanie elementu na stronie i przejście po linku - wariant 1 public void gotoGroupPage() { if (!isElementPresent(By.tagName("h1")) || !wd.findElement(By.tagName("h1")).getText().equals("Groups") || !isElementPresent(By.name("new"))) { click(By.linkText("groups")); } } */ // sprawdzanie elementu na stronie i przejście po linku - wariant 2 lepszy public void gotoGroupPage() { if (isElementPresent(By.tagName("h1")) && !wd.findElement(By.tagName("h1")).getText().equals("Groups") && !isElementPresent(By.name("new"))) { return; } click(By.linkText("groups")); } public void gotoMainPage() { if (isElementPresent(By.id("maintable"))) { return; } click(By.linkText("home")); } } ## Instruction: Revert "Revert "Revert "optimization of the transition between the pages""" This reverts commit 4ad3b97fcc8ee3476a910d0a891eb66b069f2843. ## Code After: package kurs.zadania.adressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; public class NavigationHelper extends HelperBase{ public NavigationHelper(WebDriver wd) { super(wd); } public void gotoGroupPage() { click(By.linkText("groups")); } public void gotoMainPage() { click(By.linkText("home")); } }
// ... existing code ... super(wd); } public void gotoGroupPage() { click(By.linkText("groups")); } public void gotoMainPage() { click(By.linkText("home")); } } // ... rest of the code ...
66a9d140feb3a0bd332031853fb1038622fd5c5b
oidc_apis/utils.py
oidc_apis/utils.py
from collections import OrderedDict def combine_uniquely(iterable1, iterable2): """ Combine unique items of two sequences preserving order. :type seq1: Iterable[Any] :type seq2: Iterable[Any] :rtype: list[Any] """ result = OrderedDict.fromkeys(iterable1) for item in iterable2: result[item] = None return list(result.keys()) def after_userlogin_hook(request, user, client): """Marks Django session modified The purpose of this function is to keep the session used by the oidc-provider fresh. This is achieved by pointing 'OIDC_AFTER_USERLOGIN_HOOK' setting to this.""" request.session.modified = True # Return None to continue the login flow return None
from collections import OrderedDict import django from oidc_provider import settings from django.contrib.auth import BACKEND_SESSION_KEY from django.contrib.auth import logout as django_user_logout from users.models import LoginMethod, OidcClientOptions from django.contrib.auth.views import redirect_to_login def combine_uniquely(iterable1, iterable2): """ Combine unique items of two sequences preserving order. :type seq1: Iterable[Any] :type seq2: Iterable[Any] :rtype: list[Any] """ result = OrderedDict.fromkeys(iterable1) for item in iterable2: result[item] = None return list(result.keys()) def after_userlogin_hook(request, user, client): """Marks Django session modified The purpose of this function is to keep the session used by the oidc-provider fresh. This is achieved by pointing 'OIDC_AFTER_USERLOGIN_HOOK' setting to this.""" request.session.modified = True last_login_backend = request.session.get('social_auth_last_login_backend') client_options = OidcClientOptions.objects.get(oidc_client=client) allowed_methods = client_options.login_methods.all() if allowed_methods is None: raise django.core.exceptions.PermissionDenied allowed_providers = set((x.provider_id for x in allowed_methods)) if last_login_backend is not None: active_backend = user.social_auth.filter(provider=last_login_backend) if ((last_login_backend is None and user is not None) or (active_backend.exists() and active_backend.first().provider not in allowed_providers)): django_user_logout(request) next_page = request.get_full_path() return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL')) # Return None to continue the login flow return None
Implement current session auth method check
Implement current session auth method check
Python
mit
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
python
## Code Before: from collections import OrderedDict def combine_uniquely(iterable1, iterable2): """ Combine unique items of two sequences preserving order. :type seq1: Iterable[Any] :type seq2: Iterable[Any] :rtype: list[Any] """ result = OrderedDict.fromkeys(iterable1) for item in iterable2: result[item] = None return list(result.keys()) def after_userlogin_hook(request, user, client): """Marks Django session modified The purpose of this function is to keep the session used by the oidc-provider fresh. This is achieved by pointing 'OIDC_AFTER_USERLOGIN_HOOK' setting to this.""" request.session.modified = True # Return None to continue the login flow return None ## Instruction: Implement current session auth method check ## Code After: from collections import OrderedDict import django from oidc_provider import settings from django.contrib.auth import BACKEND_SESSION_KEY from django.contrib.auth import logout as django_user_logout from users.models import LoginMethod, OidcClientOptions from django.contrib.auth.views import redirect_to_login def combine_uniquely(iterable1, iterable2): """ Combine unique items of two sequences preserving order. :type seq1: Iterable[Any] :type seq2: Iterable[Any] :rtype: list[Any] """ result = OrderedDict.fromkeys(iterable1) for item in iterable2: result[item] = None return list(result.keys()) def after_userlogin_hook(request, user, client): """Marks Django session modified The purpose of this function is to keep the session used by the oidc-provider fresh. 
This is achieved by pointing 'OIDC_AFTER_USERLOGIN_HOOK' setting to this.""" request.session.modified = True last_login_backend = request.session.get('social_auth_last_login_backend') client_options = OidcClientOptions.objects.get(oidc_client=client) allowed_methods = client_options.login_methods.all() if allowed_methods is None: raise django.core.exceptions.PermissionDenied allowed_providers = set((x.provider_id for x in allowed_methods)) if last_login_backend is not None: active_backend = user.social_auth.filter(provider=last_login_backend) if ((last_login_backend is None and user is not None) or (active_backend.exists() and active_backend.first().provider not in allowed_providers)): django_user_logout(request) next_page = request.get_full_path() return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL')) # Return None to continue the login flow return None
// ... existing code ... from collections import OrderedDict import django from oidc_provider import settings from django.contrib.auth import BACKEND_SESSION_KEY from django.contrib.auth import logout as django_user_logout from users.models import LoginMethod, OidcClientOptions from django.contrib.auth.views import redirect_to_login def combine_uniquely(iterable1, iterable2): // ... modified code ... 'OIDC_AFTER_USERLOGIN_HOOK' setting to this.""" request.session.modified = True last_login_backend = request.session.get('social_auth_last_login_backend') client_options = OidcClientOptions.objects.get(oidc_client=client) allowed_methods = client_options.login_methods.all() if allowed_methods is None: raise django.core.exceptions.PermissionDenied allowed_providers = set((x.provider_id for x in allowed_methods)) if last_login_backend is not None: active_backend = user.social_auth.filter(provider=last_login_backend) if ((last_login_backend is None and user is not None) or (active_backend.exists() and active_backend.first().provider not in allowed_providers)): django_user_logout(request) next_page = request.get_full_path() return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL')) # Return None to continue the login flow return None // ... rest of the code ...
f6a3f0ba3a0e178e4fe99204eb4f14a3c90d1475
src/main/java/me/coley/recaf/parse/assembly/exception/ExceptionWrapper.java
src/main/java/me/coley/recaf/parse/assembly/exception/ExceptionWrapper.java
package me.coley.recaf.parse.assembly.exception; /** * Wrapper for assembler exceptions with the lines that caused them. * * @author Matt */ public class ExceptionWrapper { public final int line; public final Exception exception; public ExceptionWrapper(int line, Exception exception) { this.line = line; this.exception = exception; } }
package me.coley.recaf.parse.assembly.exception; /** * Wrapper for assembler exceptions with the lines that caused them. * * @author Matt */ public class ExceptionWrapper { public final int line; public final Exception exception; public ExceptionWrapper(int line, Exception exception) { this.line = line; this.exception = exception; } public void printStackTrace() { exception.printStackTrace(); } }
Add qol access to printStackTrace
Add qol access to printStackTrace
Java
mit
Col-E/Recaf,Col-E/Recaf
java
## Code Before: package me.coley.recaf.parse.assembly.exception; /** * Wrapper for assembler exceptions with the lines that caused them. * * @author Matt */ public class ExceptionWrapper { public final int line; public final Exception exception; public ExceptionWrapper(int line, Exception exception) { this.line = line; this.exception = exception; } } ## Instruction: Add qol access to printStackTrace ## Code After: package me.coley.recaf.parse.assembly.exception; /** * Wrapper for assembler exceptions with the lines that caused them. * * @author Matt */ public class ExceptionWrapper { public final int line; public final Exception exception; public ExceptionWrapper(int line, Exception exception) { this.line = line; this.exception = exception; } public void printStackTrace() { exception.printStackTrace(); } }
// ... existing code ... this.line = line; this.exception = exception; } public void printStackTrace() { exception.printStackTrace(); } } // ... rest of the code ...
1e7ae82002716f1314e19258e7e30a50e57f552e
samples/hello-js/build.gradle.kts
samples/hello-js/build.gradle.kts
import org.jetbrains.kotlin.gradle.tasks.Kotlin2JsCompile buildscript { repositories { jcenter() } dependencies { classpath(kotlin("gradle-plugin")) } } apply { plugin("kotlin2js") } dependencies { "compile"(kotlin("stdlib-js")) } repositories { jcenter() } tasks.withType<Kotlin2JsCompile> { kotlinOptions.outputFile = "$buildDir/web/output.js" kotlinOptions.sourceMap = true } val assembleWeb by tasks.creating assembleWeb.doLast { configurations["compileClasspath"].forEach { file: File -> copy { includeEmptyDirs = false from(zipTree(file.absoluteFile)) into("$buildDir/web") include { fileTreeElement -> val path = fileTreeElement.path path.endsWith(".js") && (path.startsWith("META-INF/resources/") || !path.startsWith("META-INF/")) } } copy { from("${the<JavaPluginConvention>().sourceSets["main"].output.resourcesDir}/index.html") into("$buildDir/web") } } } val assemble by tasks val classes by tasks assemble.dependsOn(assembleWeb) assembleWeb.dependsOn(classes)
import org.jetbrains.kotlin.gradle.tasks.Kotlin2JsCompile buildscript { repositories { jcenter() } dependencies { classpath(kotlin("gradle-plugin")) } } apply { plugin("kotlin2js") } dependencies { "compile"(kotlin("stdlib-js")) } repositories { jcenter() } tasks.withType<Kotlin2JsCompile> { kotlinOptions.outputFile = "$buildDir/web/output.js" kotlinOptions.sourceMap = true } val assembleWeb by tasks.creating assembleWeb.doLast { configurations["compileClasspath"].forEach { file: File -> copy { includeEmptyDirs = false from(zipTree(file.absoluteFile)) into("$buildDir/web") include { fileTreeElement -> val path = fileTreeElement.path path.endsWith(".js") && (path.startsWith("META-INF/resources/") || !path.startsWith("META-INF/")) } } } copy { from("${the<JavaPluginConvention>().sourceSets["main"].output.resourcesDir}/index.html") into("$buildDir/web") } } val assemble by tasks val classes by tasks assemble.dependsOn(assembleWeb) assembleWeb.dependsOn(classes)
Move copy-statement for the static index.html outside the foreach loop
Move copy-statement for the static index.html outside the foreach loop
Kotlin
apache-2.0
gradle/gradle,gradle/gradle-script-kotlin,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,gradle/gradle-script-kotlin,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle
kotlin
## Code Before: import org.jetbrains.kotlin.gradle.tasks.Kotlin2JsCompile buildscript { repositories { jcenter() } dependencies { classpath(kotlin("gradle-plugin")) } } apply { plugin("kotlin2js") } dependencies { "compile"(kotlin("stdlib-js")) } repositories { jcenter() } tasks.withType<Kotlin2JsCompile> { kotlinOptions.outputFile = "$buildDir/web/output.js" kotlinOptions.sourceMap = true } val assembleWeb by tasks.creating assembleWeb.doLast { configurations["compileClasspath"].forEach { file: File -> copy { includeEmptyDirs = false from(zipTree(file.absoluteFile)) into("$buildDir/web") include { fileTreeElement -> val path = fileTreeElement.path path.endsWith(".js") && (path.startsWith("META-INF/resources/") || !path.startsWith("META-INF/")) } } copy { from("${the<JavaPluginConvention>().sourceSets["main"].output.resourcesDir}/index.html") into("$buildDir/web") } } } val assemble by tasks val classes by tasks assemble.dependsOn(assembleWeb) assembleWeb.dependsOn(classes) ## Instruction: Move copy-statement for the static index.html outside the foreach loop ## Code After: import org.jetbrains.kotlin.gradle.tasks.Kotlin2JsCompile buildscript { repositories { jcenter() } dependencies { classpath(kotlin("gradle-plugin")) } } apply { plugin("kotlin2js") } dependencies { "compile"(kotlin("stdlib-js")) } repositories { jcenter() } tasks.withType<Kotlin2JsCompile> { kotlinOptions.outputFile = "$buildDir/web/output.js" kotlinOptions.sourceMap = true } val assembleWeb by tasks.creating assembleWeb.doLast { configurations["compileClasspath"].forEach { file: File -> copy { includeEmptyDirs = false from(zipTree(file.absoluteFile)) into("$buildDir/web") include { fileTreeElement -> val path = fileTreeElement.path path.endsWith(".js") && (path.startsWith("META-INF/resources/") || !path.startsWith("META-INF/")) } } } copy { from("${the<JavaPluginConvention>().sourceSets["main"].output.resourcesDir}/index.html") into("$buildDir/web") } } val assemble by tasks val classes by 
tasks assemble.dependsOn(assembleWeb) assembleWeb.dependsOn(classes)
# ... existing code ... path.endsWith(".js") && (path.startsWith("META-INF/resources/") || !path.startsWith("META-INF/")) } } } copy { from("${the<JavaPluginConvention>().sourceSets["main"].output.resourcesDir}/index.html") into("$buildDir/web") } } # ... rest of the code ...
fb3f92c36d9c530560fddf2b425821a79bd0255b
subprojects/docs/src/snippets/plugins/pluginManagement/kotlin/init.gradle.kts
subprojects/docs/src/snippets/plugins/pluginManagement/kotlin/init.gradle.kts
settingsEvaluated { settings -> settings.pluginManagement { plugins { } resolutionStrategy { } repositories { } } }
settingsEvaluated { pluginManagement { plugins { } resolutionStrategy { } repositories { } } }
Fix Gradle Kotlin DSL init script sample snippet
Fix Gradle Kotlin DSL init script sample snippet See #11885
Kotlin
apache-2.0
gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle
kotlin
## Code Before: settingsEvaluated { settings -> settings.pluginManagement { plugins { } resolutionStrategy { } repositories { } } } ## Instruction: Fix Gradle Kotlin DSL init script sample snippet See #11885 ## Code After: settingsEvaluated { pluginManagement { plugins { } resolutionStrategy { } repositories { } } }
... settingsEvaluated { pluginManagement { plugins { } resolutionStrategy { ...
4ec8a5b5880f7e5300d71c34f5b293ea5993f5b2
tests/test_storage.py
tests/test_storage.py
import os import json import pytest def test_add_single(identity_fixures, identity_store): for d in identity_fixures: identity = identity_store.add_identity(d) for key, val in d.items(): assert getattr(identity, key) == val def test_add_multiple(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) assert len(identity_store.identities) == len(identity_fixures) def test_id_validation(identity_fixures, identity_store): from awsident.storage import IdentityExists identity_store.add_identities(*identity_fixures) with pytest.raises(IdentityExists): identity_store.add_identity(identity_fixures[0]) identity = identity_store.identities.values()[0] original_id = identity.id identity.access_key_id = 'ichanged' assert 'ichanged' in identity_store.keys() assert original_id not in identity_store.keys() def test_serialization(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) # data should have been saved at this point so clear and reload it identity_store.identities.clear() identity_store.load_from_config() for data in identity_fixures: identity = identity_store.get(data['access_key_id']) for key, val in data.items(): assert getattr(identity, key) == val
import os import json import pytest def test_add_single(identity_fixures, identity_store): for d in identity_fixures: identity = identity_store.add_identity(d) for key, val in d.items(): assert getattr(identity, key) == val def test_add_multiple(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) assert len(identity_store.identities) == len(identity_fixures) def test_id_validation(identity_fixures, identity_store): from awsident.storage import IdentityExists identity_store.add_identities(*identity_fixures) with pytest.raises(IdentityExists): identity_store.add_identity(identity_fixures[0]) identity = list(identity_store.values())[0] original_id = identity.id identity.access_key_id = 'ichanged' assert 'ichanged' in identity_store.keys() assert original_id not in identity_store.keys() def test_serialization(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) # data should have been saved at this point so clear and reload it identity_store.identities.clear() identity_store.load_from_config() for data in identity_fixures: identity = identity_store.get(data['access_key_id']) for key, val in data.items(): assert getattr(identity, key) == val
Convert dict view to list for Py3
Convert dict view to list for Py3
Python
mit
nocarryr/AWS-Identity-Manager
python
## Code Before: import os import json import pytest def test_add_single(identity_fixures, identity_store): for d in identity_fixures: identity = identity_store.add_identity(d) for key, val in d.items(): assert getattr(identity, key) == val def test_add_multiple(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) assert len(identity_store.identities) == len(identity_fixures) def test_id_validation(identity_fixures, identity_store): from awsident.storage import IdentityExists identity_store.add_identities(*identity_fixures) with pytest.raises(IdentityExists): identity_store.add_identity(identity_fixures[0]) identity = identity_store.identities.values()[0] original_id = identity.id identity.access_key_id = 'ichanged' assert 'ichanged' in identity_store.keys() assert original_id not in identity_store.keys() def test_serialization(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) # data should have been saved at this point so clear and reload it identity_store.identities.clear() identity_store.load_from_config() for data in identity_fixures: identity = identity_store.get(data['access_key_id']) for key, val in data.items(): assert getattr(identity, key) == val ## Instruction: Convert dict view to list for Py3 ## Code After: import os import json import pytest def test_add_single(identity_fixures, identity_store): for d in identity_fixures: identity = identity_store.add_identity(d) for key, val in d.items(): assert getattr(identity, key) == val def test_add_multiple(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) assert len(identity_store.identities) == len(identity_fixures) def test_id_validation(identity_fixures, identity_store): from awsident.storage import IdentityExists identity_store.add_identities(*identity_fixures) with pytest.raises(IdentityExists): identity_store.add_identity(identity_fixures[0]) identity = list(identity_store.values())[0] original_id = 
identity.id identity.access_key_id = 'ichanged' assert 'ichanged' in identity_store.keys() assert original_id not in identity_store.keys() def test_serialization(identity_fixures, identity_store): identity_store.add_identities(*identity_fixures) # data should have been saved at this point so clear and reload it identity_store.identities.clear() identity_store.load_from_config() for data in identity_fixures: identity = identity_store.get(data['access_key_id']) for key, val in data.items(): assert getattr(identity, key) == val
... identity_store.add_identities(*identity_fixures) with pytest.raises(IdentityExists): identity_store.add_identity(identity_fixures[0]) identity = list(identity_store.values())[0] original_id = identity.id identity.access_key_id = 'ichanged' assert 'ichanged' in identity_store.keys() ...
e21260b57873ed70bd6b1690b62a754af58020fc
otp_twilio/migrations/0002_last_t.py
otp_twilio/migrations/0002_last_t.py
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('otp_twilio_encrypted', '0001_initial'), ] operations = [ migrations.AddField( model_name='twiliosmsdevice', name='last_t', field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'), preserve_default=True, ), ]
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('otp_twilio', '0001_initial'), ] operations = [ migrations.AddField( model_name='twiliosmsdevice', name='last_t', field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'), preserve_default=True, ), ]
Undo dependency name change in last migration
Undo dependency name change in last migration
Python
bsd-2-clause
prototypsthlm/otp_twilio_encrypted,gustavrannestig/otp_twilio_encrypted
python
## Code Before: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('otp_twilio_encrypted', '0001_initial'), ] operations = [ migrations.AddField( model_name='twiliosmsdevice', name='last_t', field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'), preserve_default=True, ), ] ## Instruction: Undo dependency name change in last migration ## Code After: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('otp_twilio', '0001_initial'), ] operations = [ migrations.AddField( model_name='twiliosmsdevice', name='last_t', field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'), preserve_default=True, ), ]
// ... existing code ... class Migration(migrations.Migration): dependencies = [ ('otp_twilio', '0001_initial'), ] operations = [ // ... rest of the code ...
c6fa98931feaf9514b84ae979f32013ca345ef5f
saleor/product/views.py
saleor/product/views.py
from __future__ import unicode_literals from django.http import HttpResponsePermanentRedirect from django.contrib import messages from django.shortcuts import get_object_or_404, redirect from django.template.response import TemplateResponse from django.utils.translation import ugettext as _ from .forms import get_form_class_for_product from .models import Product, Category def product_details(request, slug, product_id): product = get_object_or_404(Product, id=product_id) if product.get_slug() != slug: return HttpResponsePermanentRedirect(product.get_absolute_url()) form_class = get_form_class_for_product(product) form = form_class(cart=request.cart, product=product, data=request.POST or None) if form.is_valid(): if form.cleaned_data['quantity']: msg = _('Added %(product)s to your cart.') % { 'product': product} messages.success(request, msg) form.save() return redirect('product:details', slug=slug, product_id=product_id) return TemplateResponse(request, 'product/details.html', { 'product': product, 'form': form}) def category_index(request, slug): category = get_object_or_404(Category, slug=slug) products = category.products.all() return TemplateResponse(request, 'category/index.html', { 'products': products, 'category': category})
from __future__ import unicode_literals from django.http import HttpResponsePermanentRedirect from django.contrib import messages from django.shortcuts import get_object_or_404, redirect from django.template.response import TemplateResponse from django.utils.translation import ugettext as _ from .forms import get_form_class_for_product from .models import Product, Category def product_details(request, slug, product_id): product = get_object_or_404(Product, id=product_id) if product.get_slug() != slug: return HttpResponsePermanentRedirect(product.get_absolute_url()) form_class = get_form_class_for_product(product) form = form_class(cart=request.cart, product=product, data=request.POST or None) if form.is_valid(): if form.cleaned_data['quantity']: msg = _('Added %(product)s to your cart.') % { 'product': product} messages.success(request, msg) form.save() return redirect('product:details', slug=slug, product_id=product_id) template_name = 'product/details_%s' % (product.__class__.__name__.lower()) return TemplateResponse(request, [template_name, 'product/details.html'], { 'product': product, 'form': form}) def category_index(request, slug): category = get_object_or_404(Category, slug=slug) products = category.products.all() return TemplateResponse(request, 'category/index.html', { 'products': products, 'category': category})
Allow to use custom templates for products details pages
Allow to use custom templates for products details pages
Python
bsd-3-clause
josesanch/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,avorio/saleor,tfroehlich82/saleor,avorio/saleor,laosunhust/saleor,KenMutemi/saleor,paweltin/saleor,mociepka/saleor,dashmug/saleor,HyperManTT/ECommerceSaleor,paweltin/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,taedori81/saleor,UITools/saleor,josesanch/saleor,itbabu/saleor,UITools/saleor,KenMutemi/saleor,rchav/vinerack,spartonia/saleor,rchav/vinerack,paweltin/saleor,spartonia/saleor,HyperManTT/ECommerceSaleor,car3oon/saleor,paweltin/saleor,laosunhust/saleor,hongquan/saleor,rodrigozn/CW-Shop,maferelo/saleor,UITools/saleor,dashmug/saleor,spartonia/saleor,taedori81/saleor,taedori81/saleor,mociepka/saleor,laosunhust/saleor,hongquan/saleor,rchav/vinerack,taedori81/saleor,jreigel/saleor,Drekscott/Motlaesaleor,jreigel/saleor,arth-co/saleor,dashmug/saleor,arth-co/saleor,maferelo/saleor,mociepka/saleor,avorio/saleor,car3oon/saleor,UITools/saleor,rodrigozn/CW-Shop,itbabu/saleor,spartonia/saleor,Drekscott/Motlaesaleor,Drekscott/Motlaesaleor,KenMutemi/saleor,hongquan/saleor,laosunhust/saleor,arth-co/saleor,josesanch/saleor,itbabu/saleor,jreigel/saleor,tfroehlich82/saleor,rodrigozn/CW-Shop,tfroehlich82/saleor,arth-co/saleor,avorio/saleor
python
## Code Before: from __future__ import unicode_literals from django.http import HttpResponsePermanentRedirect from django.contrib import messages from django.shortcuts import get_object_or_404, redirect from django.template.response import TemplateResponse from django.utils.translation import ugettext as _ from .forms import get_form_class_for_product from .models import Product, Category def product_details(request, slug, product_id): product = get_object_or_404(Product, id=product_id) if product.get_slug() != slug: return HttpResponsePermanentRedirect(product.get_absolute_url()) form_class = get_form_class_for_product(product) form = form_class(cart=request.cart, product=product, data=request.POST or None) if form.is_valid(): if form.cleaned_data['quantity']: msg = _('Added %(product)s to your cart.') % { 'product': product} messages.success(request, msg) form.save() return redirect('product:details', slug=slug, product_id=product_id) return TemplateResponse(request, 'product/details.html', { 'product': product, 'form': form}) def category_index(request, slug): category = get_object_or_404(Category, slug=slug) products = category.products.all() return TemplateResponse(request, 'category/index.html', { 'products': products, 'category': category}) ## Instruction: Allow to use custom templates for products details pages ## Code After: from __future__ import unicode_literals from django.http import HttpResponsePermanentRedirect from django.contrib import messages from django.shortcuts import get_object_or_404, redirect from django.template.response import TemplateResponse from django.utils.translation import ugettext as _ from .forms import get_form_class_for_product from .models import Product, Category def product_details(request, slug, product_id): product = get_object_or_404(Product, id=product_id) if product.get_slug() != slug: return HttpResponsePermanentRedirect(product.get_absolute_url()) form_class = get_form_class_for_product(product) form = 
form_class(cart=request.cart, product=product, data=request.POST or None) if form.is_valid(): if form.cleaned_data['quantity']: msg = _('Added %(product)s to your cart.') % { 'product': product} messages.success(request, msg) form.save() return redirect('product:details', slug=slug, product_id=product_id) template_name = 'product/details_%s' % (product.__class__.__name__.lower()) return TemplateResponse(request, [template_name, 'product/details.html'], { 'product': product, 'form': form}) def category_index(request, slug): category = get_object_or_404(Category, slug=slug) products = category.products.all() return TemplateResponse(request, 'category/index.html', { 'products': products, 'category': category})
# ... existing code ... messages.success(request, msg) form.save() return redirect('product:details', slug=slug, product_id=product_id) template_name = 'product/details_%s' % (product.__class__.__name__.lower()) return TemplateResponse(request, [template_name, 'product/details.html'], { 'product': product, 'form': form}) # ... rest of the code ...
cc9cae41e368e6834190c0316a9425b6bcba6ecd
src/main/java/de/craften/plugins/educraft/luaapi/functions/MoveForwardFunction.java
src/main/java/de/craften/plugins/educraft/luaapi/functions/MoveForwardFunction.java
package de.craften.plugins.educraft.luaapi.functions; import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.bukkit.Location; import org.bukkit.block.BlockFace; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; /** * Lua API function to move one block forward. */ public class MoveForwardFunction extends EduCraftApiFunction { @Override public Varargs execute(Varargs varargs) { Location targetLocation = getApi().getStationaryBehavior().getLocation().clone().add(getApi().getDirection()); if (!targetLocation.getBlock().getType().isSolid() && !targetLocation.getBlock().getRelative(BlockFace.UP).getType().isSolid()) { getApi().getStationaryBehavior().setLocation(targetLocation); } return LuaValue.NIL; } }
package de.craften.plugins.educraft.luaapi.functions; import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.bukkit.Location; import org.bukkit.block.BlockFace; import org.bukkit.entity.Entity; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; import java.util.Collection; /** * Lua API function to move one block forward. */ public class MoveForwardFunction extends EduCraftApiFunction { @Override public Varargs execute(Varargs varargs) { Location targetLocation = getApi().getStationaryBehavior().getLocation().clone().add(getApi().getDirection()); Collection<Entity> entities = targetLocation.getWorld().getNearbyEntities(targetLocation.getBlock().getLocation().add(0.5, 0.5, 0.5), 0.5, 0.5, 0.5); if (!targetLocation.getBlock().getType().isSolid() && !targetLocation.getBlock().getRelative(BlockFace.UP).getType().isSolid() && entities.isEmpty()) { getApi().getStationaryBehavior().setLocation(targetLocation); } return LuaValue.NIL; } }
Check for entities ahead before moving forward.
Check for entities ahead before moving forward.
Java
mit
leMaik/EduCraft
java
## Code Before: package de.craften.plugins.educraft.luaapi.functions; import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.bukkit.Location; import org.bukkit.block.BlockFace; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; /** * Lua API function to move one block forward. */ public class MoveForwardFunction extends EduCraftApiFunction { @Override public Varargs execute(Varargs varargs) { Location targetLocation = getApi().getStationaryBehavior().getLocation().clone().add(getApi().getDirection()); if (!targetLocation.getBlock().getType().isSolid() && !targetLocation.getBlock().getRelative(BlockFace.UP).getType().isSolid()) { getApi().getStationaryBehavior().setLocation(targetLocation); } return LuaValue.NIL; } } ## Instruction: Check for entities ahead before moving forward. ## Code After: package de.craften.plugins.educraft.luaapi.functions; import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.bukkit.Location; import org.bukkit.block.BlockFace; import org.bukkit.entity.Entity; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; import java.util.Collection; /** * Lua API function to move one block forward. */ public class MoveForwardFunction extends EduCraftApiFunction { @Override public Varargs execute(Varargs varargs) { Location targetLocation = getApi().getStationaryBehavior().getLocation().clone().add(getApi().getDirection()); Collection<Entity> entities = targetLocation.getWorld().getNearbyEntities(targetLocation.getBlock().getLocation().add(0.5, 0.5, 0.5), 0.5, 0.5, 0.5); if (!targetLocation.getBlock().getType().isSolid() && !targetLocation.getBlock().getRelative(BlockFace.UP).getType().isSolid() && entities.isEmpty()) { getApi().getStationaryBehavior().setLocation(targetLocation); } return LuaValue.NIL; } }
... import de.craften.plugins.educraft.luaapi.EduCraftApiFunction; import org.bukkit.Location; import org.bukkit.block.BlockFace; import org.bukkit.entity.Entity; import org.luaj.vm2.LuaValue; import org.luaj.vm2.Varargs; import java.util.Collection; /** * Lua API function to move one block forward. ... @Override public Varargs execute(Varargs varargs) { Location targetLocation = getApi().getStationaryBehavior().getLocation().clone().add(getApi().getDirection()); Collection<Entity> entities = targetLocation.getWorld().getNearbyEntities(targetLocation.getBlock().getLocation().add(0.5, 0.5, 0.5), 0.5, 0.5, 0.5); if (!targetLocation.getBlock().getType().isSolid() && !targetLocation.getBlock().getRelative(BlockFace.UP).getType().isSolid() && entities.isEmpty()) { getApi().getStationaryBehavior().setLocation(targetLocation); } ...
5d15ae493663b23d1554f4f285cf3d2044134878
pybossa_analyst/zip_builder.py
pybossa_analyst/zip_builder.py
"""Zip builder module for pybossa-analyst.""" import requests import zipstream def _download(url): """Download data from a URL.""" yield requests.get(url).content def _generate_zip(tasks, fn_key, url_key): """Generate a zip containing downloaded task data.""" z = zipstream.ZipFile(compression=zipstream.ZIP_DEFLATED) for t in tasks: fn = t.info[fn_key] url = t.info[url_key] z.write_iter(fn, _download(url)) for chunk in z: yield chunk def generate(tasks, importer): """Generate a zip file containing original task input.""" if importer == 'flickr': return _generate_zip(tasks, "title", "url") else: raise ValueError("Unknown importer type")
"""Zip builder module for pybossa-analyst.""" import requests import zipstream def _download(url): """Download data from a URL.""" yield requests.get(url).content def _generate_zip(tasks, fn_key, url_key): """Generate a zip containing downloaded task data.""" z = zipstream.ZipFile(compression=zipstream.ZIP_DEFLATED) for t in tasks: fn = t.info[fn_key] url = t.info[url_key] z.write_iter(fn, _download(url)) for chunk in z: yield chunk def generate(tasks, importer): """Generate a zip file containing original task input.""" if importer == 'flickr': for t in tasks: t.info["title"] = "{0}.{1}".format(t.info["title"], "jpg") return _generate_zip(tasks, "title", "url") else: raise ValueError("Unknown importer type")
Add jpg link for flickr downloads
Add jpg link for flickr downloads
Python
unknown
alexandermendes/pybossa-analyst,alexandermendes/pybossa-analyst,alexandermendes/pybossa-analyst,LibCrowds/libcrowds-analyst
python
## Code Before: """Zip builder module for pybossa-analyst.""" import requests import zipstream def _download(url): """Download data from a URL.""" yield requests.get(url).content def _generate_zip(tasks, fn_key, url_key): """Generate a zip containing downloaded task data.""" z = zipstream.ZipFile(compression=zipstream.ZIP_DEFLATED) for t in tasks: fn = t.info[fn_key] url = t.info[url_key] z.write_iter(fn, _download(url)) for chunk in z: yield chunk def generate(tasks, importer): """Generate a zip file containing original task input.""" if importer == 'flickr': return _generate_zip(tasks, "title", "url") else: raise ValueError("Unknown importer type") ## Instruction: Add jpg link for flickr downloads ## Code After: """Zip builder module for pybossa-analyst.""" import requests import zipstream def _download(url): """Download data from a URL.""" yield requests.get(url).content def _generate_zip(tasks, fn_key, url_key): """Generate a zip containing downloaded task data.""" z = zipstream.ZipFile(compression=zipstream.ZIP_DEFLATED) for t in tasks: fn = t.info[fn_key] url = t.info[url_key] z.write_iter(fn, _download(url)) for chunk in z: yield chunk def generate(tasks, importer): """Generate a zip file containing original task input.""" if importer == 'flickr': for t in tasks: t.info["title"] = "{0}.{1}".format(t.info["title"], "jpg") return _generate_zip(tasks, "title", "url") else: raise ValueError("Unknown importer type")
// ... existing code ... def generate(tasks, importer): """Generate a zip file containing original task input.""" if importer == 'flickr': for t in tasks: t.info["title"] = "{0}.{1}".format(t.info["title"], "jpg") return _generate_zip(tasks, "title", "url") else: raise ValueError("Unknown importer type") // ... rest of the code ...
177e9089a4bfdb04d73a12eba6dc3b426b33b8d6
src/GtkApplicationDelegate.h
src/GtkApplicationDelegate.h
/* GTK+ Integration with platform-specific application-wide features * such as the OS X menubar and application delegate concepts. * * Copyright (C) 2009 Paul Davis * Copyright © 2010 John Ralls * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; version 2.1 * of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ #import <Cocoa/Cocoa.h> @interface GtkApplicationDelegate : NSObject <NSApplicationDelegate> {} @end
/* GTK+ Integration with platform-specific application-wide features * such as the OS X menubar and application delegate concepts. * * Copyright (C) 2009 Paul Davis * Copyright © 2010 John Ralls * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; version 2.1 * of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ #import <Cocoa/Cocoa.h> #include <AvailabilityMacros.h> #if MAC_OS_X_VERSION_MIN_REQUIRED >= 1060 @interface GtkApplicationDelegate : NSObject <NSApplicationDelegate> {} #else @interface GtkApplicationDelegate : NSObject {} #endif @end
Fix compile error on Leopard & Tiger.
Fix compile error on Leopard & Tiger.
C
lgpl-2.1
GNOME/gtk-mac-integration,GNOME/gtk-mac-integration,sharoonthomas/gtk-mac-integration,GNOME/gtk-mac-integration,sharoonthomas/gtk-mac-integration,sharoonthomas/gtk-mac-integration,sharoonthomas/gtk-mac-integration,jralls/gtk-mac-integration,sharoonthomas/gtk-mac-integration,jralls/gtk-mac-integration,jralls/gtk-mac-integration
c
## Code Before: /* GTK+ Integration with platform-specific application-wide features * such as the OS X menubar and application delegate concepts. * * Copyright (C) 2009 Paul Davis * Copyright © 2010 John Ralls * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; version 2.1 * of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ #import <Cocoa/Cocoa.h> @interface GtkApplicationDelegate : NSObject <NSApplicationDelegate> {} @end ## Instruction: Fix compile error on Leopard & Tiger. ## Code After: /* GTK+ Integration with platform-specific application-wide features * such as the OS X menubar and application delegate concepts. * * Copyright (C) 2009 Paul Davis * Copyright © 2010 John Ralls * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; version 2.1 * of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. 
*/ #import <Cocoa/Cocoa.h> #include <AvailabilityMacros.h> #if MAC_OS_X_VERSION_MIN_REQUIRED >= 1060 @interface GtkApplicationDelegate : NSObject <NSApplicationDelegate> {} #else @interface GtkApplicationDelegate : NSObject {} #endif @end
// ... existing code ... */ #import <Cocoa/Cocoa.h> #include <AvailabilityMacros.h> #if MAC_OS_X_VERSION_MIN_REQUIRED >= 1060 @interface GtkApplicationDelegate : NSObject <NSApplicationDelegate> {} #else @interface GtkApplicationDelegate : NSObject {} #endif @end // ... rest of the code ...
159caac86902add83f4ff3a2d813559144806f98
src/forces/label_state.h
src/forces/label_state.h
namespace Forces { /** * \brief * * */ class LabelState { public: EIGEN_MAKE_ALIGNED_OPERATOR_NEW LabelState(int id, std::string text, Eigen::Vector3f anchorPosition); const int id; const Eigen::Vector3f anchorPosition; Eigen::Vector3f labelPosition; Eigen::Vector2f anchorPosition2D; Eigen::Vector2f labelPosition2D; float labelPositionDepth; private: std::string text; }; } // namespace Forces #endif // SRC_FORCES_LABEL_STATE_H_
namespace Forces { /** * \brief * * */ class LabelState { public: EIGEN_MAKE_ALIGNED_OPERATOR_NEW LabelState(int id, std::string text, Eigen::Vector3f anchorPosition); const int id; const Eigen::Vector3f anchorPosition; Eigen::Vector3f labelPosition; Eigen::Vector2f anchorPosition2D; Eigen::Vector2f labelPosition2D; float labelPositionDepth; const std::string text; }; } // namespace Forces #endif // SRC_FORCES_LABEL_STATE_H_
Make text in LabelState public.
Make text in LabelState public.
C
mit
Christof/voly-labeller,Christof/voly-labeller,Christof/voly-labeller,Christof/voly-labeller
c
## Code Before: namespace Forces { /** * \brief * * */ class LabelState { public: EIGEN_MAKE_ALIGNED_OPERATOR_NEW LabelState(int id, std::string text, Eigen::Vector3f anchorPosition); const int id; const Eigen::Vector3f anchorPosition; Eigen::Vector3f labelPosition; Eigen::Vector2f anchorPosition2D; Eigen::Vector2f labelPosition2D; float labelPositionDepth; private: std::string text; }; } // namespace Forces #endif // SRC_FORCES_LABEL_STATE_H_ ## Instruction: Make text in LabelState public. ## Code After: namespace Forces { /** * \brief * * */ class LabelState { public: EIGEN_MAKE_ALIGNED_OPERATOR_NEW LabelState(int id, std::string text, Eigen::Vector3f anchorPosition); const int id; const Eigen::Vector3f anchorPosition; Eigen::Vector3f labelPosition; Eigen::Vector2f anchorPosition2D; Eigen::Vector2f labelPosition2D; float labelPositionDepth; const std::string text; }; } // namespace Forces #endif // SRC_FORCES_LABEL_STATE_H_
... Eigen::Vector2f anchorPosition2D; Eigen::Vector2f labelPosition2D; float labelPositionDepth; const std::string text; }; } // namespace Forces ...
5b63aa453f1e5a5a7022b115c89633518caa7cb8
apt/src/main/java/net/wizardsoflua/annotation/GenerateLuaDoc.java
apt/src/main/java/net/wizardsoflua/annotation/GenerateLuaDoc.java
package net.wizardsoflua.annotation; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.SOURCE; import java.lang.annotation.Retention; import java.lang.annotation.Target; @Retention(SOURCE) @Target(TYPE) public @interface GenerateLuaDoc { /** * The name of the module. Required unless @{@link LuaClassAttributes} is present, in which case * the name defaults to {@link LuaClassAttributes#name()}. */ String name() default ""; /** * The subtitle of the module. */ String subtitle() default ""; /** * The document type ({@code "module"}, {@code "class"} or {@code "event"}). Defaults to * {@code "class"} when the type is also annotated with @{@link GenerateLuaClassTable}. Defaults * to {@code "module"} when the type is also annotated with @{@link GenerateLuaModuleTable}. */ String type() default ""; }
package net.wizardsoflua.annotation; import static java.lang.annotation.ElementType.TYPE; import java.lang.annotation.Target; /** * We use class retention, because otherwise this annotation is not available on unchanged classes * during eclipses incremental compilation. * * @author Adrodoc */ @Target(TYPE) public @interface GenerateLuaDoc { /** * The name of the module. Required unless @{@link LuaClassAttributes} is present, in which case * the name defaults to {@link LuaClassAttributes#name()}. */ String name() default ""; /** * The subtitle of the module. */ String subtitle() default ""; /** * The document type ({@code "module"}, {@code "class"} or {@code "event"}). Defaults to * {@code "class"} when the type is also annotated with @{@link GenerateLuaClassTable}. Defaults * to {@code "module"} when the type is also annotated with @{@link GenerateLuaModuleTable}. */ String type() default ""; }
Fix incremental compilation for lua classes that require a superclass name resolution
Fix incremental compilation for lua classes that require a superclass name resolution
Java
apache-2.0
mkarneim/luamod
java
## Code Before: package net.wizardsoflua.annotation; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.SOURCE; import java.lang.annotation.Retention; import java.lang.annotation.Target; @Retention(SOURCE) @Target(TYPE) public @interface GenerateLuaDoc { /** * The name of the module. Required unless @{@link LuaClassAttributes} is present, in which case * the name defaults to {@link LuaClassAttributes#name()}. */ String name() default ""; /** * The subtitle of the module. */ String subtitle() default ""; /** * The document type ({@code "module"}, {@code "class"} or {@code "event"}). Defaults to * {@code "class"} when the type is also annotated with @{@link GenerateLuaClassTable}. Defaults * to {@code "module"} when the type is also annotated with @{@link GenerateLuaModuleTable}. */ String type() default ""; } ## Instruction: Fix incremental compilation for lua classes that require a superclass name resolution ## Code After: package net.wizardsoflua.annotation; import static java.lang.annotation.ElementType.TYPE; import java.lang.annotation.Target; /** * We use class retention, because otherwise this annotation is not available on unchanged classes * during eclipses incremental compilation. * * @author Adrodoc */ @Target(TYPE) public @interface GenerateLuaDoc { /** * The name of the module. Required unless @{@link LuaClassAttributes} is present, in which case * the name defaults to {@link LuaClassAttributes#name()}. */ String name() default ""; /** * The subtitle of the module. */ String subtitle() default ""; /** * The document type ({@code "module"}, {@code "class"} or {@code "event"}). Defaults to * {@code "class"} when the type is also annotated with @{@link GenerateLuaClassTable}. Defaults * to {@code "module"} when the type is also annotated with @{@link GenerateLuaModuleTable}. */ String type() default ""; }
... package net.wizardsoflua.annotation; import static java.lang.annotation.ElementType.TYPE; import java.lang.annotation.Target; /** * We use class retention, because otherwise this annotation is not available on unchanged classes * during eclipses incremental compilation. * * @author Adrodoc */ @Target(TYPE) public @interface GenerateLuaDoc { /** ...
b26047600202a9776c99323813cf17b0aa951dcd
app/routes.py
app/routes.py
from flask import jsonify from . import app import mapper import utils from predict import predictor @app.route("/", methods=["GET"]) def index(): firebase_dump = mapper.get_dump_firebase() response = firebase_dump.get_all() response = response or {} return jsonify(response) @app.route("/build", methods=["POST"]) def build_model(): predictor.preprocess_airports() if not predictor.model: predictor.build_model() return jsonify({"message:" : "OK"}) @app.route("/predict", methods=["GET"]) def predict_all_delays(): results = None try: results = predictor.predict_all() except Exception as e: return jsonify({"message" : e.message}) return jsonify(results) @app.route("/predict/<airport_code>", methods=["GET"]) def predict_delay(airport_code): firebase_source = mapper.get_source_firebase() airport_status = firebase_source.get_airport(airport_code) cleaned_data = utils.get_clean_data(airport_status) res = predictor.predict(airport_code) cleaned_data["prediction"] = bool(res[0]) return jsonify(cleaned_data) @app.route("/status", methods=["GET"]) def get_airport_statuses(): firebase_source = mapper.get_source_firebase() airports = firebase_source.get_all() for airport_code, status in airports.items(): if "status" in status: del status["status"] return jsonify(airports)
from flask import jsonify from . import app import mapper import utils from predict import predictor @app.route("/", methods=["GET"]) def index(): return app.send_static_file("index.html") @app.route("/build", methods=["POST"]) def build_model(): predictor.preprocess_airports() if not predictor.model: predictor.build_model() return jsonify({"message:" : "OK"}) @app.route("/predict", methods=["GET"]) def predict_all_delays(): results = None try: results = predictor.predict_all() except Exception as e: print "ERROR", e.message return jsonify({"message" : e.message}) return jsonify(results) @app.route("/predict/<airport_code>", methods=["GET"]) def predict_delay(airport_code): firebase_source = mapper.get_source_firebase() airport_status = firebase_source.get_airport(airport_code) cleaned_data = utils.get_clean_data(airport_status) res = predictor.predict(airport_code) cleaned_data["prediction"] = bool(res[0]) return jsonify(cleaned_data) @app.route("/status", methods=["GET"]) def get_airport_statuses(): firebase_source = mapper.get_source_firebase() airports = firebase_source.get_all() results = [] for airport_code, status in airports.items(): try: results.append(utils.get_clean_data(status)) except: pass results = {"items":results} return jsonify(results)
Return index.html in root and transform /status results
Return index.html in root and transform /status results
Python
mit
MachineLearningProject/flight-delay-prediction,MachineLearningProject/flight-delay-prediction,MachineLearningProject/flight-delay-prediction
python
## Code Before: from flask import jsonify from . import app import mapper import utils from predict import predictor @app.route("/", methods=["GET"]) def index(): firebase_dump = mapper.get_dump_firebase() response = firebase_dump.get_all() response = response or {} return jsonify(response) @app.route("/build", methods=["POST"]) def build_model(): predictor.preprocess_airports() if not predictor.model: predictor.build_model() return jsonify({"message:" : "OK"}) @app.route("/predict", methods=["GET"]) def predict_all_delays(): results = None try: results = predictor.predict_all() except Exception as e: return jsonify({"message" : e.message}) return jsonify(results) @app.route("/predict/<airport_code>", methods=["GET"]) def predict_delay(airport_code): firebase_source = mapper.get_source_firebase() airport_status = firebase_source.get_airport(airport_code) cleaned_data = utils.get_clean_data(airport_status) res = predictor.predict(airport_code) cleaned_data["prediction"] = bool(res[0]) return jsonify(cleaned_data) @app.route("/status", methods=["GET"]) def get_airport_statuses(): firebase_source = mapper.get_source_firebase() airports = firebase_source.get_all() for airport_code, status in airports.items(): if "status" in status: del status["status"] return jsonify(airports) ## Instruction: Return index.html in root and transform /status results ## Code After: from flask import jsonify from . 
import app import mapper import utils from predict import predictor @app.route("/", methods=["GET"]) def index(): return app.send_static_file("index.html") @app.route("/build", methods=["POST"]) def build_model(): predictor.preprocess_airports() if not predictor.model: predictor.build_model() return jsonify({"message:" : "OK"}) @app.route("/predict", methods=["GET"]) def predict_all_delays(): results = None try: results = predictor.predict_all() except Exception as e: print "ERROR", e.message return jsonify({"message" : e.message}) return jsonify(results) @app.route("/predict/<airport_code>", methods=["GET"]) def predict_delay(airport_code): firebase_source = mapper.get_source_firebase() airport_status = firebase_source.get_airport(airport_code) cleaned_data = utils.get_clean_data(airport_status) res = predictor.predict(airport_code) cleaned_data["prediction"] = bool(res[0]) return jsonify(cleaned_data) @app.route("/status", methods=["GET"]) def get_airport_statuses(): firebase_source = mapper.get_source_firebase() airports = firebase_source.get_all() results = [] for airport_code, status in airports.items(): try: results.append(utils.get_clean_data(status)) except: pass results = {"items":results} return jsonify(results)
... @app.route("/", methods=["GET"]) def index(): return app.send_static_file("index.html") @app.route("/build", methods=["POST"]) def build_model(): ... try: results = predictor.predict_all() except Exception as e: print "ERROR", e.message return jsonify({"message" : e.message}) return jsonify(results) ... def get_airport_statuses(): firebase_source = mapper.get_source_firebase() airports = firebase_source.get_all() results = [] for airport_code, status in airports.items(): try: results.append(utils.get_clean_data(status)) except: pass results = {"items":results} return jsonify(results) ...
a9158ccc9aceac55d715f76e5641eb22b459f3a4
build.gradle.kts
build.gradle.kts
/** * Builds and runs the site locally. */ task<Exec>("runSite") { commandLine("./_script/jekyll-serve") } task<Exec>("buildSite") { commandLine("./_script/jekyll-build") }
/** * Builds and runs the site locally. */ task<Exec>("runSite") { commandLine("./_script/jekyll-serve") } /** * Builds the site without starting the server. */ task<Exec>("buildSite") { commandLine("./_script/jekyll-build") }
Document the site build task
Document the site build task
Kotlin
apache-2.0
SpineEventEngine/SpineEventEngine.github.io,SpineEventEngine/SpineEventEngine.github.io,SpineEventEngine/SpineEventEngine.github.io,SpineEventEngine/SpineEventEngine.github.io,SpineEventEngine/SpineEventEngine.github.io,SpineEventEngine/SpineEventEngine.github.io
kotlin
## Code Before: /** * Builds and runs the site locally. */ task<Exec>("runSite") { commandLine("./_script/jekyll-serve") } task<Exec>("buildSite") { commandLine("./_script/jekyll-build") } ## Instruction: Document the site build task ## Code After: /** * Builds and runs the site locally. */ task<Exec>("runSite") { commandLine("./_script/jekyll-serve") } /** * Builds the site without starting the server. */ task<Exec>("buildSite") { commandLine("./_script/jekyll-build") }
// ... existing code ... commandLine("./_script/jekyll-serve") } /** * Builds the site without starting the server. */ task<Exec>("buildSite") { commandLine("./_script/jekyll-build") } // ... rest of the code ...
8dc265ac0c2bbea683d900f64c5080a23879c9da
spacy/tests/lang/da/test_exceptions.py
spacy/tests/lang/da/test_exceptions.py
from __future__ import unicode_literals import pytest @pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."]) def test_da_tokenizer_handles_abbr(da_tokenizer, text): tokens = da_tokenizer(text) assert len(tokens) == 1 def test_da_tokenizer_handles_exc_in_text(da_tokenizer): text = "Det er bl.a. ikke meningen" tokens = da_tokenizer(text) assert len(tokens) == 5 assert tokens[2].text == "bl.a."
from __future__ import unicode_literals import pytest @pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."]) def test_da_tokenizer_handles_abbr(da_tokenizer, text): tokens = da_tokenizer(text) assert len(tokens) == 1 def test_da_tokenizer_handles_exc_in_text(da_tokenizer): text = "Det er bl.a. ikke meningen" tokens = da_tokenizer(text) assert len(tokens) == 5 assert tokens[2].text == "bl.a." def test_da_tokenizer_handles_custom_base_exc(da_tokenizer): text = "Her er noget du kan kigge i." tokens = da_tokenizer(text) assert len(tokens) == 8 assert tokens[6].text == "i" assert tokens[7].text == "."
Add test for tokenization of 'i.' for Danish.
Add test for tokenization of 'i.' for Danish.
Python
mit
explosion/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy
python
## Code Before: from __future__ import unicode_literals import pytest @pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."]) def test_da_tokenizer_handles_abbr(da_tokenizer, text): tokens = da_tokenizer(text) assert len(tokens) == 1 def test_da_tokenizer_handles_exc_in_text(da_tokenizer): text = "Det er bl.a. ikke meningen" tokens = da_tokenizer(text) assert len(tokens) == 5 assert tokens[2].text == "bl.a." ## Instruction: Add test for tokenization of 'i.' for Danish. ## Code After: from __future__ import unicode_literals import pytest @pytest.mark.parametrize('text', ["ca.", "m.a.o.", "Jan.", "Dec."]) def test_da_tokenizer_handles_abbr(da_tokenizer, text): tokens = da_tokenizer(text) assert len(tokens) == 1 def test_da_tokenizer_handles_exc_in_text(da_tokenizer): text = "Det er bl.a. ikke meningen" tokens = da_tokenizer(text) assert len(tokens) == 5 assert tokens[2].text == "bl.a." def test_da_tokenizer_handles_custom_base_exc(da_tokenizer): text = "Her er noget du kan kigge i." tokens = da_tokenizer(text) assert len(tokens) == 8 assert tokens[6].text == "i" assert tokens[7].text == "."
... tokens = da_tokenizer(text) assert len(tokens) == 5 assert tokens[2].text == "bl.a." def test_da_tokenizer_handles_custom_base_exc(da_tokenizer): text = "Her er noget du kan kigge i." tokens = da_tokenizer(text) assert len(tokens) == 8 assert tokens[6].text == "i" assert tokens[7].text == "." ...
2117778d777120293e506eca9743f97619b5ad5c
kiwi/interface.py
kiwi/interface.py
class Menu(object): def __init__(self, dialog, items, title, caller = None): self.d = dialog self.caller = caller self.entries = [] self.dispatch_table = {} tag = 1 self.title = title for entry, func in items: self.entries.append(tuple([str(tag), entry])) self.dispatch_table[str(tag)] = func tag += 1 def run(self, ret=None): code, tag = self.d.menu(self.title, choices=self.entries) if code == self.d.OK: self.dispatch(tag) if ret: ret() def dispatch(self, tag): if tag in self.dispatch_table: func = self.dispatch_table[tag] if isinstance(func, Menu): func.run(ret=self.run) else: func()
class MenuItem(object): def __init__(self, func=None): if func: self.function = func # Wrapper for child.function() that creates a call stack def run(self, ret=None): self.function() if ret: ret() class Menu(MenuItem): def __init__(self, dialog, items, title): self.d = dialog self.entries = [] self.dispatch_table = {} tag = 1 self.title = title for entry, func in items: self.entries.append(tuple([str(tag), entry])) self.dispatch_table[str(tag)] = func tag += 1 def function(self): code, tag = self.d.menu(self.title, choices=self.entries) if code == self.d.OK: self._dispatch(tag) def _dispatch(self, tag): if tag in self.dispatch_table: func = self.dispatch_table[tag] if isinstance(func, MenuItem): func.run(ret=self.run) else: func()
Create object MenuItem that wraps functions to create a call stack
Create object MenuItem that wraps functions to create a call stack
Python
mit
jakogut/KiWI
python
## Code Before: class Menu(object): def __init__(self, dialog, items, title, caller = None): self.d = dialog self.caller = caller self.entries = [] self.dispatch_table = {} tag = 1 self.title = title for entry, func in items: self.entries.append(tuple([str(tag), entry])) self.dispatch_table[str(tag)] = func tag += 1 def run(self, ret=None): code, tag = self.d.menu(self.title, choices=self.entries) if code == self.d.OK: self.dispatch(tag) if ret: ret() def dispatch(self, tag): if tag in self.dispatch_table: func = self.dispatch_table[tag] if isinstance(func, Menu): func.run(ret=self.run) else: func() ## Instruction: Create object MenuItem that wraps functions to create a call stack ## Code After: class MenuItem(object): def __init__(self, func=None): if func: self.function = func # Wrapper for child.function() that creates a call stack def run(self, ret=None): self.function() if ret: ret() class Menu(MenuItem): def __init__(self, dialog, items, title): self.d = dialog self.entries = [] self.dispatch_table = {} tag = 1 self.title = title for entry, func in items: self.entries.append(tuple([str(tag), entry])) self.dispatch_table[str(tag)] = func tag += 1 def function(self): code, tag = self.d.menu(self.title, choices=self.entries) if code == self.d.OK: self._dispatch(tag) def _dispatch(self, tag): if tag in self.dispatch_table: func = self.dispatch_table[tag] if isinstance(func, MenuItem): func.run(ret=self.run) else: func()
# ... existing code ... class MenuItem(object): def __init__(self, func=None): if func: self.function = func # Wrapper for child.function() that creates a call stack def run(self, ret=None): self.function() if ret: ret() class Menu(MenuItem): def __init__(self, dialog, items, title): self.d = dialog self.entries = [] self.dispatch_table = {} # ... modified code ... self.dispatch_table[str(tag)] = func tag += 1 def function(self): code, tag = self.d.menu(self.title, choices=self.entries) if code == self.d.OK: self._dispatch(tag) def _dispatch(self, tag): if tag in self.dispatch_table: func = self.dispatch_table[tag] if isinstance(func, MenuItem): func.run(ret=self.run) else: func() # ... rest of the code ...
0cda764617dcbf52c36d4a63e240b6f849b06640
tests/app/test_application.py
tests/app/test_application.py
from .helpers import BaseApplicationTest class TestApplication(BaseApplicationTest): def test_index(self): response = self.client.get('/') assert 200 == response.status_code
from .helpers import BaseApplicationTest class TestApplication(BaseApplicationTest): def test_index(self): response = self.client.get('/') assert 200 == response.status_code def test_404(self): response = self.client.get('/not-found') assert 404 == response.status_code
Add test for not found URLs
Add test for not found URLs
Python
mit
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
python
## Code Before: from .helpers import BaseApplicationTest class TestApplication(BaseApplicationTest): def test_index(self): response = self.client.get('/') assert 200 == response.status_code ## Instruction: Add test for not found URLs ## Code After: from .helpers import BaseApplicationTest class TestApplication(BaseApplicationTest): def test_index(self): response = self.client.get('/') assert 200 == response.status_code def test_404(self): response = self.client.get('/not-found') assert 404 == response.status_code
... def test_index(self): response = self.client.get('/') assert 200 == response.status_code def test_404(self): response = self.client.get('/not-found') assert 404 == response.status_code ...
9793107fb218bdff796d8df55404156e299e33ea
website/apps/ts_om/check.py
website/apps/ts_om/check.py
import os from django.conf import settings __author__ = 'nreed' url_dict = { 'validate': 'http://127.0.0.1:8000/om_validate/validate/', 'scenarios': '/home/nreed/scenarios/', 'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/' } def check_dir(local_dir, typ): if local_dir is None or local_dir == '': return url_dict[typ] if os.name == "nt": if not local_dir.endswith('\\'): local_dir += '\\' else: if not local_dir.endswith('/'): local_dir += '/' return local_dir def check_url(url, typ): if url is None or url == '': return url_dict[typ] if not url.endswith('/'): url += '/' return url
import os from django.conf import settings __author__ = 'nreed' url_dict = { 'validate': 'http://127.0.0.1:8000/om_validate/validate/', 'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/', 'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/' } def check_dir(local_dir, typ): if local_dir is None or local_dir == '': return url_dict[typ] if os.name == "nt": if not local_dir.endswith('\\'): local_dir += '\\' else: if not local_dir.endswith('/'): local_dir += '/' return local_dir def check_url(url, typ): if url is None or url == '': return url_dict[typ] if not url.endswith('/'): url += '/' return url
Set default scenarios directory to within root of project.
Set default scenarios directory to within root of project.
Python
mpl-2.0
vecnet/om,vecnet/om,vecnet/om,vecnet/om,vecnet/om
python
## Code Before: import os from django.conf import settings __author__ = 'nreed' url_dict = { 'validate': 'http://127.0.0.1:8000/om_validate/validate/', 'scenarios': '/home/nreed/scenarios/', 'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/' } def check_dir(local_dir, typ): if local_dir is None or local_dir == '': return url_dict[typ] if os.name == "nt": if not local_dir.endswith('\\'): local_dir += '\\' else: if not local_dir.endswith('/'): local_dir += '/' return local_dir def check_url(url, typ): if url is None or url == '': return url_dict[typ] if not url.endswith('/'): url += '/' return url ## Instruction: Set default scenarios directory to within root of project. ## Code After: import os from django.conf import settings __author__ = 'nreed' url_dict = { 'validate': 'http://127.0.0.1:8000/om_validate/validate/', 'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/', 'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/' } def check_dir(local_dir, typ): if local_dir is None or local_dir == '': return url_dict[typ] if os.name == "nt": if not local_dir.endswith('\\'): local_dir += '\\' else: if not local_dir.endswith('/'): local_dir += '/' return local_dir def check_url(url, typ): if url is None or url == '': return url_dict[typ] if not url.endswith('/'): url += '/' return url
// ... existing code ... url_dict = { 'validate': 'http://127.0.0.1:8000/om_validate/validate/', 'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/', 'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/' } // ... rest of the code ...
f3e3b43abebfad0fcaa20df8eac20e3cb8c099d6
imgproc.py
imgproc.py
from SimpleCV import * import numpy import cv2 def process_image(obj, img, config): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :return: Mask with candidates surrounded in a green rectangle """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), numpy.array([config.min_hue, config.min_sat, config.min_val]), numpy.array([config.max_hue, config.max_sat, config.max_val]))) segmented = segmented.dilate(2) blobs = segmented.findBlobs() if blobs: for b in blobs: if b.radius() > 10: rect_width = b.minRectWidth() rect_height = b.minRectHeight() aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) # Give the result mask return segmented.applyLayers()
from SimpleCV import * import numpy import cv2 def process_image(obj, img, config, each_blob=None): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :param each_blob: function, taking a SimpleCV.Blob as an argument, that is called for every candidate blob :return: Mask with candidates """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), numpy.array([config.min_hue, config.min_sat, config.min_val]), numpy.array([config.max_hue, config.max_sat, config.max_val]))) segmented = segmented.dilate(2) blobs = segmented.findBlobs() if blobs: for b in blobs: if b.radius() > 10: rect_width = b.minRectWidth() rect_height = b.minRectHeight() aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: if not each_blob: # default to just outlining # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) else: each_blob(b) # Give the result mask return segmented.applyLayers()
Allow a function to be called whenever a candidate blob is found during image processing
Allow a function to be called whenever a candidate blob is found during image processing
Python
mit
mstojcevich/Flash-Vision
python
## Code Before: from SimpleCV import * import numpy import cv2 def process_image(obj, img, config): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :return: Mask with candidates surrounded in a green rectangle """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), numpy.array([config.min_hue, config.min_sat, config.min_val]), numpy.array([config.max_hue, config.max_sat, config.max_val]))) segmented = segmented.dilate(2) blobs = segmented.findBlobs() if blobs: for b in blobs: if b.radius() > 10: rect_width = b.minRectWidth() rect_height = b.minRectHeight() aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) # Give the result mask return segmented.applyLayers() ## Instruction: Allow a function to be called whenever a candidate blob is found during image processing ## Code After: from SimpleCV import * import numpy import cv2 def process_image(obj, img, config, each_blob=None): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :param each_blob: function, taking a SimpleCV.Blob as an argument, that is called for every candidate blob :return: Mask with candidates """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), numpy.array([config.min_hue, config.min_sat, config.min_val]), numpy.array([config.max_hue, config.max_sat, config.max_val]))) segmented = segmented.dilate(2) blobs = segmented.findBlobs() if blobs: for b in blobs: if b.radius() > 10: rect_width = b.minRectWidth() rect_height = b.minRectHeight() aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: 
if not each_blob: # default to just outlining # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) else: each_blob(b) # Give the result mask return segmented.applyLayers()
... import cv2 def process_image(obj, img, config, each_blob=None): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :param each_blob: function, taking a SimpleCV.Blob as an argument, that is called for every candidate blob :return: Mask with candidates """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), ... aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: if not each_blob: # default to just outlining # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) else: each_blob(b) # Give the result mask return segmented.applyLayers() ...
eb1fdf3419bdfd1d5920d73a877f707162b783b0
cfgrib/__init__.py
cfgrib/__init__.py
__version__ = "0.9.9.2.dev0" # cfgrib core API depends on the ECMWF ecCodes C-library only from .cfmessage import CfMessage from .dataset import ( Dataset, DatasetBuildError, open_container, open_file, open_fileindex, open_from_index, ) from .messages import FileStream, Message # NOTE: xarray is not a hard dependency, but let's provide helpers if it is available. try: from .xarray_store import open_dataset, open_datasets except ImportError: pass
__version__ = "0.9.9.2.dev0" # cfgrib core API depends on the ECMWF ecCodes C-library only from .cfmessage import CfMessage from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index from .messages import FileStream, Message # NOTE: xarray is not a hard dependency, but let's provide helpers if it is available. try: from .xarray_store import open_dataset, open_datasets except ImportError: pass
Drop unused and dangerous entrypoint `open_fileindex`
Drop unused and dangerous entrypoint `open_fileindex`
Python
apache-2.0
ecmwf/cfgrib
python
## Code Before: __version__ = "0.9.9.2.dev0" # cfgrib core API depends on the ECMWF ecCodes C-library only from .cfmessage import CfMessage from .dataset import ( Dataset, DatasetBuildError, open_container, open_file, open_fileindex, open_from_index, ) from .messages import FileStream, Message # NOTE: xarray is not a hard dependency, but let's provide helpers if it is available. try: from .xarray_store import open_dataset, open_datasets except ImportError: pass ## Instruction: Drop unused and dangerous entrypoint `open_fileindex` ## Code After: __version__ = "0.9.9.2.dev0" # cfgrib core API depends on the ECMWF ecCodes C-library only from .cfmessage import CfMessage from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index from .messages import FileStream, Message # NOTE: xarray is not a hard dependency, but let's provide helpers if it is available. try: from .xarray_store import open_dataset, open_datasets except ImportError: pass
... # cfgrib core API depends on the ECMWF ecCodes C-library only from .cfmessage import CfMessage from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index from .messages import FileStream, Message # NOTE: xarray is not a hard dependency, but let's provide helpers if it is available. ...
de630eb49412f0ef25cb5a010efd6922a5c4d28b
common.h
common.h
(byte & 0x80 ? '#' : ' '), \ (byte & 0x40 ? '#' : ' '), \ (byte & 0x20 ? '#' : ' '), \ (byte & 0x10 ? '#' : ' '), \ (byte & 0x08 ? '#' : ' '), \ (byte & 0x04 ? '#' : ' '), \ (byte & 0x02 ? '#' : ' '), \ (byte & 0x01 ? '#' : ' ') #define MLCD_WIDTH 48 #define MLCD_HEIGHT 32 #define MLCD_BYTES ((MLCD_WIDTH * MLCD_HEIGHT) / 8) #endif /* MLCD_COMMON_H */
(byte & 0x80 ? '1' : '0'), \ (byte & 0x40 ? '1' : '0'), \ (byte & 0x20 ? '1' : '0'), \ (byte & 0x10 ? '1' : '0'), \ (byte & 0x08 ? '1' : '0'), \ (byte & 0x04 ? '1' : '0'), \ (byte & 0x02 ? '1' : '0'), \ (byte & 0x01 ? '1' : '0') #define MLCD_WIDTH 48 #define MLCD_HEIGHT 32 #define MLCD_BYTES ((MLCD_WIDTH * MLCD_HEIGHT) / 8) #endif /* MLCD_COMMON_H */
Fix asm generation with mlcd -a
Fix asm generation with mlcd -a
C
bsd-2-clause
travispaul/mlcd
c
## Code Before: (byte & 0x80 ? '#' : ' '), \ (byte & 0x40 ? '#' : ' '), \ (byte & 0x20 ? '#' : ' '), \ (byte & 0x10 ? '#' : ' '), \ (byte & 0x08 ? '#' : ' '), \ (byte & 0x04 ? '#' : ' '), \ (byte & 0x02 ? '#' : ' '), \ (byte & 0x01 ? '#' : ' ') #define MLCD_WIDTH 48 #define MLCD_HEIGHT 32 #define MLCD_BYTES ((MLCD_WIDTH * MLCD_HEIGHT) / 8) #endif /* MLCD_COMMON_H */ ## Instruction: Fix asm generation with mlcd -a ## Code After: (byte & 0x80 ? '1' : '0'), \ (byte & 0x40 ? '1' : '0'), \ (byte & 0x20 ? '1' : '0'), \ (byte & 0x10 ? '1' : '0'), \ (byte & 0x08 ? '1' : '0'), \ (byte & 0x04 ? '1' : '0'), \ (byte & 0x02 ? '1' : '0'), \ (byte & 0x01 ? '1' : '0') #define MLCD_WIDTH 48 #define MLCD_HEIGHT 32 #define MLCD_BYTES ((MLCD_WIDTH * MLCD_HEIGHT) / 8) #endif /* MLCD_COMMON_H */
... (byte & 0x80 ? '1' : '0'), \ (byte & 0x40 ? '1' : '0'), \ (byte & 0x20 ? '1' : '0'), \ (byte & 0x10 ? '1' : '0'), \ (byte & 0x08 ? '1' : '0'), \ (byte & 0x04 ? '1' : '0'), \ (byte & 0x02 ? '1' : '0'), \ (byte & 0x01 ? '1' : '0') #define MLCD_WIDTH 48 #define MLCD_HEIGHT 32 ...
52a4ae2b1dbc35f768406dc831ae980ca3f43244
snapshots/hacl-c-experimental/aead_chacha20_poly1305.h
snapshots/hacl-c-experimental/aead_chacha20_poly1305.h
void blit(uint8_t* src, uint32_t len, uint8_t* dest, uint32_t pos); void poly1305_key_gen(uint8_t* otk, uint8_t* key, uint8_t* nonce); uint32_t hacl_aead_chacha20_poly1305_encrypt(uint8_t *plaintext, uint32_t plaintext_len, uint8_t *aad, uint32_t aad_len, uint8_t *key, uint8_t *iv, uint8_t *ciphertext, uint8_t *tag);
void poly1305_key_gen(uint8_t* otk, uint8_t* key, uint8_t* nonce); uint32_t hacl_aead_chacha20_poly1305_encrypt(uint8_t *plaintext, uint32_t plaintext_len, uint8_t *aad, uint32_t aad_len, uint8_t *key, uint8_t *iv, uint8_t *ciphertext, uint8_t *tag);
Remove exposed function from AEAD Chacha20 Poly1305 code
Remove exposed function from AEAD Chacha20 Poly1305 code
C
apache-2.0
mitls/hacl-star,mitls/hacl-star,mitls/hacl-star,mitls/hacl-star,mitls/hacl-star,mitls/hacl-star,mitls/hacl-star,mitls/hacl-star,mitls/hacl-star
c
## Code Before: void blit(uint8_t* src, uint32_t len, uint8_t* dest, uint32_t pos); void poly1305_key_gen(uint8_t* otk, uint8_t* key, uint8_t* nonce); uint32_t hacl_aead_chacha20_poly1305_encrypt(uint8_t *plaintext, uint32_t plaintext_len, uint8_t *aad, uint32_t aad_len, uint8_t *key, uint8_t *iv, uint8_t *ciphertext, uint8_t *tag); ## Instruction: Remove exposed function from AEAD Chacha20 Poly1305 code ## Code After: void poly1305_key_gen(uint8_t* otk, uint8_t* key, uint8_t* nonce); uint32_t hacl_aead_chacha20_poly1305_encrypt(uint8_t *plaintext, uint32_t plaintext_len, uint8_t *aad, uint32_t aad_len, uint8_t *key, uint8_t *iv, uint8_t *ciphertext, uint8_t *tag);
... void poly1305_key_gen(uint8_t* otk, uint8_t* key, uint8_t* nonce); ...
47b3fd8dd3b1baa364ee93c221ae41d7a61810cb
src/lib/ecore_x/xcb/ecore_xcb_missing.c
src/lib/ecore_x/xcb/ecore_xcb_missing.c
EAPI Eina_Bool ecore_x_input_multi_select(Ecore_X_Window win) { return 0; } EAPI void ecore_x_e_comp_sync_counter_set(Ecore_X_Window win, Ecore_X_Sync_Counter counter) { } EAPI void ecore_x_e_comp_sync_draw_done_send(Ecore_X_Window root, Ecore_X_Window win) { } EAPI Eina_Bool ecore_x_e_comp_sync_supported_get(Ecore_X_Window root) { return 0; }
EAPI void ecore_x_icccm_protocol_atoms_set(Ecore_X_Window win, Ecore_X_Atom *protos, int num) { } EAPI Eina_Bool ecore_x_input_multi_select(Ecore_X_Window win) { return 0; } EAPI void ecore_x_e_comp_sync_counter_set(Ecore_X_Window win, Ecore_X_Sync_Counter counter) { } EAPI void ecore_x_e_comp_sync_draw_done_send(Ecore_X_Window root, Ecore_X_Window win) { } EAPI Eina_Bool ecore_x_e_comp_sync_supported_get(Ecore_X_Window root) { return 0; }
Add one more missing function stub
Add one more missing function stub
C
mit
OpenInkpot-archive/ecore,OpenInkpot-archive/ecore,OpenInkpot-archive/ecore
c
## Code Before: EAPI Eina_Bool ecore_x_input_multi_select(Ecore_X_Window win) { return 0; } EAPI void ecore_x_e_comp_sync_counter_set(Ecore_X_Window win, Ecore_X_Sync_Counter counter) { } EAPI void ecore_x_e_comp_sync_draw_done_send(Ecore_X_Window root, Ecore_X_Window win) { } EAPI Eina_Bool ecore_x_e_comp_sync_supported_get(Ecore_X_Window root) { return 0; } ## Instruction: Add one more missing function stub ## Code After: EAPI void ecore_x_icccm_protocol_atoms_set(Ecore_X_Window win, Ecore_X_Atom *protos, int num) { } EAPI Eina_Bool ecore_x_input_multi_select(Ecore_X_Window win) { return 0; } EAPI void ecore_x_e_comp_sync_counter_set(Ecore_X_Window win, Ecore_X_Sync_Counter counter) { } EAPI void ecore_x_e_comp_sync_draw_done_send(Ecore_X_Window root, Ecore_X_Window win) { } EAPI Eina_Bool ecore_x_e_comp_sync_supported_get(Ecore_X_Window root) { return 0; }
// ... existing code ... EAPI void ecore_x_icccm_protocol_atoms_set(Ecore_X_Window win, Ecore_X_Atom *protos, int num) { } EAPI Eina_Bool ecore_x_input_multi_select(Ecore_X_Window win) // ... rest of the code ...
04608636f6e4fc004458560499338af4b871cddb
asyncio_irc/message.py
asyncio_irc/message.py
from .utils import to_bytes class Message: """A message recieved from the IRC network.""" def __init__(self, raw_message): self.raw = raw_message self.prefix, self.command, self.params, self.suffix = self._elements() def _elements(self): """ Split the raw message into it's component parts. Adapted from http://stackoverflow.com/a/930706/400691 """ message = self.raw.strip() prefix = b'' # Odd slicing required for bytes to avoid getting int instead of char # http://stackoverflow.com/q/28249597/400691 if message[0:1] == b':': prefix, message = message[1:].split(b' ', 1) suffix = b'' if b' :' in message: message, suffix = message.split(b' :', 1) command, *params = message.split() params = list(filter(None, params)) return prefix, command, params, suffix def message_bytes(command, prefix=b'', params=None, suffix=b''): command = to_bytes(command) prefix = to_bytes(prefix) params = list(map(to_bytes, params or [])) suffix = to_bytes(suffix) message = command if prefix: message = b':' + prefix + b' ' + message if params: params = b' '.join(params) message = message + b' ' + params if suffix: message = message + b' :' + suffix return message
from .utils import to_bytes class Message(bytes): """A message recieved from the IRC network.""" def __init__(self, raw_message_bytes_ignored): super().__init__() self.prefix, self.command, self.params, self.suffix = self._elements() def _elements(self): """ Split the raw message into it's component parts. Adapted from http://stackoverflow.com/a/930706/400691 """ message = self.strip() prefix = b'' # Odd slicing required for bytes to avoid getting int instead of char # http://stackoverflow.com/q/28249597/400691 if message[0:1] == b':': prefix, message = message[1:].split(b' ', 1) suffix = b'' if b' :' in message: message, suffix = message.split(b' :', 1) command, *params = message.split() params = list(filter(None, params)) return prefix, command, params, suffix def message_bytes(command, prefix=b'', params=None, suffix=b''): command = to_bytes(command) prefix = to_bytes(prefix) params = list(map(to_bytes, params or [])) suffix = to_bytes(suffix) message = command if prefix: message = b':' + prefix + b' ' + message if params: params = b' '.join(params) message = message + b' ' + params if suffix: message = message + b' :' + suffix return message
Make Message a subclass of bytes
Make Message a subclass of bytes
Python
bsd-2-clause
meshy/framewirc
python
## Code Before: from .utils import to_bytes class Message: """A message recieved from the IRC network.""" def __init__(self, raw_message): self.raw = raw_message self.prefix, self.command, self.params, self.suffix = self._elements() def _elements(self): """ Split the raw message into it's component parts. Adapted from http://stackoverflow.com/a/930706/400691 """ message = self.raw.strip() prefix = b'' # Odd slicing required for bytes to avoid getting int instead of char # http://stackoverflow.com/q/28249597/400691 if message[0:1] == b':': prefix, message = message[1:].split(b' ', 1) suffix = b'' if b' :' in message: message, suffix = message.split(b' :', 1) command, *params = message.split() params = list(filter(None, params)) return prefix, command, params, suffix def message_bytes(command, prefix=b'', params=None, suffix=b''): command = to_bytes(command) prefix = to_bytes(prefix) params = list(map(to_bytes, params or [])) suffix = to_bytes(suffix) message = command if prefix: message = b':' + prefix + b' ' + message if params: params = b' '.join(params) message = message + b' ' + params if suffix: message = message + b' :' + suffix return message ## Instruction: Make Message a subclass of bytes ## Code After: from .utils import to_bytes class Message(bytes): """A message recieved from the IRC network.""" def __init__(self, raw_message_bytes_ignored): super().__init__() self.prefix, self.command, self.params, self.suffix = self._elements() def _elements(self): """ Split the raw message into it's component parts. 
Adapted from http://stackoverflow.com/a/930706/400691 """ message = self.strip() prefix = b'' # Odd slicing required for bytes to avoid getting int instead of char # http://stackoverflow.com/q/28249597/400691 if message[0:1] == b':': prefix, message = message[1:].split(b' ', 1) suffix = b'' if b' :' in message: message, suffix = message.split(b' :', 1) command, *params = message.split() params = list(filter(None, params)) return prefix, command, params, suffix def message_bytes(command, prefix=b'', params=None, suffix=b''): command = to_bytes(command) prefix = to_bytes(prefix) params = list(map(to_bytes, params or [])) suffix = to_bytes(suffix) message = command if prefix: message = b':' + prefix + b' ' + message if params: params = b' '.join(params) message = message + b' ' + params if suffix: message = message + b' :' + suffix return message
// ... existing code ... from .utils import to_bytes class Message(bytes): """A message recieved from the IRC network.""" def __init__(self, raw_message_bytes_ignored): super().__init__() self.prefix, self.command, self.params, self.suffix = self._elements() def _elements(self): // ... modified code ... Adapted from http://stackoverflow.com/a/930706/400691 """ message = self.strip() prefix = b'' # Odd slicing required for bytes to avoid getting int instead of char // ... rest of the code ...
9f5c31e20540db2470d8e370fe35c18129613502
src/test/java/patterns/document/CarTest.java
src/test/java/patterns/document/CarTest.java
package patterns.document; import java.util.HashMap; import java.util.Map; import java.util.OptionalDouble; import java.util.OptionalInt; import org.junit.Test; import org.junit.Assert; public class CarTest { private static final double PRICE = 100.0; private static final String MODEL = "Audi"; private static final String COLOR = "red"; private static final int WHEELS_COUNT = 4; @Test public void testCreateCar() throws Exception { Map<String, Object> entries = new HashMap<>(); Car car = new Car(entries); car.put(ColorTrait.KEY, COLOR); car.put(ModelTrait.KEY, MODEL); car.put(PriceTrait.KEY, PRICE); car.put(WheelsTrait.KEY, WHEELS_COUNT); String color = car.getColor(); Assert.assertEquals(COLOR, color); String model = car.getModel(); Assert.assertEquals(MODEL, model); OptionalDouble price = car.getPrice(); Assert.assertEquals(PRICE, price.getAsDouble()); OptionalInt wheels = car.getWheels(); Assert.assertEquals(WHEELS_COUNT, wheels.getAsInt()); } }
package patterns.document; import java.util.HashMap; import java.util.Map; import java.util.OptionalDouble; import java.util.OptionalInt; import org.junit.Assert; import org.junit.Test; public class CarTest { private static final double DELTA = 0.000001; private static final double PRICE = 100.0; private static final String MODEL = "Audi"; private static final String COLOR = "red"; private static final int WHEELS_COUNT = 4; @Test public void testCreateCar() throws Exception { Map<String, Object> entries = new HashMap<>(); Car car = new Car(entries); car.put(ColorTrait.KEY, COLOR); car.put(ModelTrait.KEY, MODEL); car.put(PriceTrait.KEY, PRICE); car.put(WheelsTrait.KEY, WHEELS_COUNT); String color = car.getColor(); Assert.assertEquals(COLOR, color); String model = car.getModel(); Assert.assertEquals(MODEL, model); OptionalDouble price = car.getPrice(); Assert.assertEquals(PRICE, price.getAsDouble(), DELTA); OptionalInt wheels = car.getWheels(); Assert.assertEquals(WHEELS_COUNT, wheels.getAsInt()); } }
Fix test after JUnit upgrade
Fix test after JUnit upgrade
Java
mit
bink81/java-experiments
java
## Code Before: package patterns.document; import java.util.HashMap; import java.util.Map; import java.util.OptionalDouble; import java.util.OptionalInt; import org.junit.Test; import org.junit.Assert; public class CarTest { private static final double PRICE = 100.0; private static final String MODEL = "Audi"; private static final String COLOR = "red"; private static final int WHEELS_COUNT = 4; @Test public void testCreateCar() throws Exception { Map<String, Object> entries = new HashMap<>(); Car car = new Car(entries); car.put(ColorTrait.KEY, COLOR); car.put(ModelTrait.KEY, MODEL); car.put(PriceTrait.KEY, PRICE); car.put(WheelsTrait.KEY, WHEELS_COUNT); String color = car.getColor(); Assert.assertEquals(COLOR, color); String model = car.getModel(); Assert.assertEquals(MODEL, model); OptionalDouble price = car.getPrice(); Assert.assertEquals(PRICE, price.getAsDouble()); OptionalInt wheels = car.getWheels(); Assert.assertEquals(WHEELS_COUNT, wheels.getAsInt()); } } ## Instruction: Fix test after JUnit upgrade ## Code After: package patterns.document; import java.util.HashMap; import java.util.Map; import java.util.OptionalDouble; import java.util.OptionalInt; import org.junit.Assert; import org.junit.Test; public class CarTest { private static final double DELTA = 0.000001; private static final double PRICE = 100.0; private static final String MODEL = "Audi"; private static final String COLOR = "red"; private static final int WHEELS_COUNT = 4; @Test public void testCreateCar() throws Exception { Map<String, Object> entries = new HashMap<>(); Car car = new Car(entries); car.put(ColorTrait.KEY, COLOR); car.put(ModelTrait.KEY, MODEL); car.put(PriceTrait.KEY, PRICE); car.put(WheelsTrait.KEY, WHEELS_COUNT); String color = car.getColor(); Assert.assertEquals(COLOR, color); String model = car.getModel(); Assert.assertEquals(MODEL, model); OptionalDouble price = car.getPrice(); Assert.assertEquals(PRICE, price.getAsDouble(), DELTA); OptionalInt wheels = car.getWheels(); 
Assert.assertEquals(WHEELS_COUNT, wheels.getAsInt()); } }
# ... existing code ... import java.util.OptionalDouble; import java.util.OptionalInt; import org.junit.Assert; import org.junit.Test; public class CarTest { private static final double DELTA = 0.000001; private static final double PRICE = 100.0; private static final String MODEL = "Audi"; private static final String COLOR = "red"; # ... modified code ... String model = car.getModel(); Assert.assertEquals(MODEL, model); OptionalDouble price = car.getPrice(); Assert.assertEquals(PRICE, price.getAsDouble(), DELTA); OptionalInt wheels = car.getWheels(); Assert.assertEquals(WHEELS_COUNT, wheels.getAsInt()); } # ... rest of the code ...
25746ab22ce7031e1bbee27bb04af73264525f4c
game/functional/test_input.py
game/functional/test_input.py
from twisted.trial.unittest import TestCase from twisted.internet import reactor from game.functional.test_view3d import SceneMixin from game.player import Player from game.vector import Vector class StdoutReportingController(object): # XXX Make an interface for the controller and verify this fake. def __init__(self): self.player = Player(Vector(0, 0, 0), 0, reactor.seconds) def keyUp(self, key): pass def keyDown(self, key): pass def mouseMotion(self, pos, rel, buttons): """ Report to standard out the direction of the mouse movement. """ if rel[0] < 0: print 'left', elif rel[0] > 0: print 'right', if rel[1] < 0: print 'up', if rel[1] > 0: print 'down', print class MouseInputTests(SceneMixin, TestCase): """ Tests for mouse input. """ def test_movement(self): """ When the mouse moves, the direction of movement is written to stdout. """ self.window.submitTo(StdoutReportingController()) reactor.callLater(2.0, self.window.stop) return self.window.go()
from pygame import K_q from twisted.trial.unittest import TestCase from twisted.internet import reactor from game.functional.test_view3d import SceneMixin from game.player import Player from game.vector import Vector class QuittableController(object): # XXX Make an interface for the controller and verify these fakes. def __init__(self, reactor, window): self.player = Player(Vector(0, 0, 0), 0, reactor.seconds) self.window = window def keyUp(self, key): if key == K_q: self.window.stop() def keyDown(self, key): pass def mouseMotion(self, pos, rel, buttons): pass class StdoutReportingController(QuittableController): def mouseMotion(self, pos, rel, buttons): """ Report to standard out the direction of the mouse movement. """ if rel[0] < 0: print 'left', elif rel[0] > 0: print 'right', if rel[1] < 0: print 'up', if rel[1] > 0: print 'down', print class MouseInputTests(SceneMixin, TestCase): """ Tests for mouse input. """ def test_movement(self): """ When the mouse moves, the direction of movement is written to stdout. """ self.window.submitTo(StdoutReportingController(reactor, self.window)) return self.window.go() def test_grab(self): """ Clicking on the window grabs the mouse. Clicking again releases it. """ self.window.submitTo(QuittableController(reactor, self.window)) return self.window.go()
Add a functional test for mouse grab.
Add a functional test for mouse grab.
Python
mit
eriknelson/gam3
python
## Code Before: from twisted.trial.unittest import TestCase from twisted.internet import reactor from game.functional.test_view3d import SceneMixin from game.player import Player from game.vector import Vector class StdoutReportingController(object): # XXX Make an interface for the controller and verify this fake. def __init__(self): self.player = Player(Vector(0, 0, 0), 0, reactor.seconds) def keyUp(self, key): pass def keyDown(self, key): pass def mouseMotion(self, pos, rel, buttons): """ Report to standard out the direction of the mouse movement. """ if rel[0] < 0: print 'left', elif rel[0] > 0: print 'right', if rel[1] < 0: print 'up', if rel[1] > 0: print 'down', print class MouseInputTests(SceneMixin, TestCase): """ Tests for mouse input. """ def test_movement(self): """ When the mouse moves, the direction of movement is written to stdout. """ self.window.submitTo(StdoutReportingController()) reactor.callLater(2.0, self.window.stop) return self.window.go() ## Instruction: Add a functional test for mouse grab. ## Code After: from pygame import K_q from twisted.trial.unittest import TestCase from twisted.internet import reactor from game.functional.test_view3d import SceneMixin from game.player import Player from game.vector import Vector class QuittableController(object): # XXX Make an interface for the controller and verify these fakes. def __init__(self, reactor, window): self.player = Player(Vector(0, 0, 0), 0, reactor.seconds) self.window = window def keyUp(self, key): if key == K_q: self.window.stop() def keyDown(self, key): pass def mouseMotion(self, pos, rel, buttons): pass class StdoutReportingController(QuittableController): def mouseMotion(self, pos, rel, buttons): """ Report to standard out the direction of the mouse movement. """ if rel[0] < 0: print 'left', elif rel[0] > 0: print 'right', if rel[1] < 0: print 'up', if rel[1] > 0: print 'down', print class MouseInputTests(SceneMixin, TestCase): """ Tests for mouse input. 
""" def test_movement(self): """ When the mouse moves, the direction of movement is written to stdout. """ self.window.submitTo(StdoutReportingController(reactor, self.window)) return self.window.go() def test_grab(self): """ Clicking on the window grabs the mouse. Clicking again releases it. """ self.window.submitTo(QuittableController(reactor, self.window)) return self.window.go()
... from pygame import K_q from twisted.trial.unittest import TestCase from twisted.internet import reactor ... from game.player import Player from game.vector import Vector class QuittableController(object): # XXX Make an interface for the controller and verify these fakes. def __init__(self, reactor, window): self.player = Player(Vector(0, 0, 0), 0, reactor.seconds) self.window = window def keyUp(self, key): if key == K_q: self.window.stop() def keyDown(self, key): pass def mouseMotion(self, pos, rel, buttons): pass class StdoutReportingController(QuittableController): def mouseMotion(self, pos, rel, buttons): """ Report to standard out the direction of the mouse movement. ... """ When the mouse moves, the direction of movement is written to stdout. """ self.window.submitTo(StdoutReportingController(reactor, self.window)) return self.window.go() def test_grab(self): """ Clicking on the window grabs the mouse. Clicking again releases it. """ self.window.submitTo(QuittableController(reactor, self.window)) return self.window.go() ...
a0311b0fda0474ff073a85ca78f63e9c2ab3431e
src/main/java/io/github/saidie/plantuml_api/AccountRepository.java
src/main/java/io/github/saidie/plantuml_api/AccountRepository.java
package io.github.saidie.plantuml_api; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.rest.core.annotation.RepositoryRestResource; @RepositoryRestResource public interface AccountRepository extends PagingAndSortingRepository<Account, Long> { }
package io.github.saidie.plantuml_api; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.rest.core.annotation.RepositoryRestResource; @RepositoryRestResource public interface AccountRepository extends PagingAndSortingRepository<Account, Long> { public Account findByUsername(String username); }
Add method to find account by username to the repository
Add method to find account by username to the repository
Java
mit
saidie/plantuml-api,saidie/plantuml-api
java
## Code Before: package io.github.saidie.plantuml_api; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.rest.core.annotation.RepositoryRestResource; @RepositoryRestResource public interface AccountRepository extends PagingAndSortingRepository<Account, Long> { } ## Instruction: Add method to find account by username to the repository ## Code After: package io.github.saidie.plantuml_api; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.rest.core.annotation.RepositoryRestResource; @RepositoryRestResource public interface AccountRepository extends PagingAndSortingRepository<Account, Long> { public Account findByUsername(String username); }
... @RepositoryRestResource public interface AccountRepository extends PagingAndSortingRepository<Account, Long> { public Account findByUsername(String username); } ...
21f209b618850d15734c476bd3c1b359b9a7426e
infosystem/queue.py
infosystem/queue.py
import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['ORMENU_QUEUE_URL'] self.port = flask.current_app.config['ORMENU_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['ORMENU_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['ORMENU_QUEUE_USERNAME'] self.password = flask.current_app.config['ORMENU_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL'] self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME'] self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
Use INFOSYSTEM enviroment for Queue
Use INFOSYSTEM enviroment for Queue
Python
apache-2.0
samueldmq/infosystem
python
## Code Before: import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['ORMENU_QUEUE_URL'] self.port = flask.current_app.config['ORMENU_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['ORMENU_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['ORMENU_QUEUE_USERNAME'] self.password = flask.current_app.config['ORMENU_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def __init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close() ## Instruction: Use INFOSYSTEM enviroment for Queue ## Code After: import flask from pika import BlockingConnection, PlainCredentials, ConnectionParameters class RabbitMQ: def __init__(self): self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL'] self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME'] self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) def connect(self): try: return BlockingConnection(self.params) except Exception as e: raise class ProducerQueue: def 
__init__(self, exchange, exchange_type): rabbitMQ = RabbitMQ() self.connection = rabbitMQ.connect() self.exchange = exchange self.channel = self.connection.channel() self.channel.exchange_declare( exchange=exchange, exchange_type=exchange_type, durable=True) def publish(self, routing_key): body = "" self.channel.basic_publish( exchange=self.exchange, routing_key=routing_key, body=body) self.close() def close(self): self.channel.close() self.connection.close()
... class RabbitMQ: def __init__(self): self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL'] self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT'] self.virtual_host = \ flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST'] self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME'] self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD'] credentials = PlainCredentials(self.username, self.password) self.params = ConnectionParameters( self.url, self.port, self.virtual_host, credentials) ...
ea36c10d9be63bc24bd9ac8c45b2f143e8d8ffb7
unit-tests/src/org/commcare/dalvik/application/CommCareTestApplication.java
unit-tests/src/org/commcare/dalvik/application/CommCareTestApplication.java
package org.commcare.dalvik.application; import org.commcare.android.database.HybridFileBackedSqlStorage; import org.commcare.android.database.HybridFileBackedSqlStorageMock; import org.javarosa.core.services.storage.Persistable; /** * @author Phillip Mates ([email protected]). */ public class CommCareTestApplication extends CommCareApplication { @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedAppStorage(String name, Class<T> c) { return getCurrentApp().getFileBackedStorage(name, c); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedUserStorage(String storage, Class<T> c) { return new HybridFileBackedSqlStorageMock<>(storage, c, buildUserDbHandle(), getUserKeyRecordId()); } @Override public CommCareApp getCurrentApp() { return new CommCareTestApp(super.getCurrentApp()); } }
package org.commcare.dalvik.application; import org.commcare.android.database.HybridFileBackedSqlStorage; import org.commcare.android.database.HybridFileBackedSqlStorageMock; import org.javarosa.core.services.storage.Persistable; import org.junit.Assert; /** * @author Phillip Mates ([email protected]). */ public class CommCareTestApplication extends CommCareApplication { @Override public void onCreate() { super.onCreate(); Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread thread, Throwable ex) { Assert.fail(ex.getMessage()); } }); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedAppStorage(String name, Class<T> c) { return getCurrentApp().getFileBackedStorage(name, c); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedUserStorage(String storage, Class<T> c) { return new HybridFileBackedSqlStorageMock<>(storage, c, buildUserDbHandle(), getUserKeyRecordId()); } @Override public CommCareApp getCurrentApp() { return new CommCareTestApp(super.getCurrentApp()); } }
Add top-level exception handler that just fails test
Add top-level exception handler that just fails test
Java
apache-2.0
dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android
java
## Code Before: package org.commcare.dalvik.application; import org.commcare.android.database.HybridFileBackedSqlStorage; import org.commcare.android.database.HybridFileBackedSqlStorageMock; import org.javarosa.core.services.storage.Persistable; /** * @author Phillip Mates ([email protected]). */ public class CommCareTestApplication extends CommCareApplication { @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedAppStorage(String name, Class<T> c) { return getCurrentApp().getFileBackedStorage(name, c); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedUserStorage(String storage, Class<T> c) { return new HybridFileBackedSqlStorageMock<>(storage, c, buildUserDbHandle(), getUserKeyRecordId()); } @Override public CommCareApp getCurrentApp() { return new CommCareTestApp(super.getCurrentApp()); } } ## Instruction: Add top-level exception handler that just fails test ## Code After: package org.commcare.dalvik.application; import org.commcare.android.database.HybridFileBackedSqlStorage; import org.commcare.android.database.HybridFileBackedSqlStorageMock; import org.javarosa.core.services.storage.Persistable; import org.junit.Assert; /** * @author Phillip Mates ([email protected]). 
*/ public class CommCareTestApplication extends CommCareApplication { @Override public void onCreate() { super.onCreate(); Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread thread, Throwable ex) { Assert.fail(ex.getMessage()); } }); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedAppStorage(String name, Class<T> c) { return getCurrentApp().getFileBackedStorage(name, c); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedUserStorage(String storage, Class<T> c) { return new HybridFileBackedSqlStorageMock<>(storage, c, buildUserDbHandle(), getUserKeyRecordId()); } @Override public CommCareApp getCurrentApp() { return new CommCareTestApp(super.getCurrentApp()); } }
// ... existing code ... import org.commcare.android.database.HybridFileBackedSqlStorage; import org.commcare.android.database.HybridFileBackedSqlStorageMock; import org.javarosa.core.services.storage.Persistable; import org.junit.Assert; /** * @author Phillip Mates ([email protected]). */ public class CommCareTestApplication extends CommCareApplication { @Override public void onCreate() { super.onCreate(); Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread thread, Throwable ex) { Assert.fail(ex.getMessage()); } }); } @Override public <T extends Persistable> HybridFileBackedSqlStorage<T> getFileBackedAppStorage(String name, Class<T> c) { return getCurrentApp().getFileBackedStorage(name, c); // ... rest of the code ...
5dbf0438957fe9066a82467c8a015387b0712369
src/main/java/li/l1t/tingo/service/TeacherService.java
src/main/java/li/l1t/tingo/service/TeacherService.java
package li.l1t.tingo.service; import li.l1t.tingo.exception.TeacherNotFoundException; import li.l1t.tingo.model.Teacher; import li.l1t.tingo.model.dto.TeacherDto; import li.l1t.tingo.model.repo.TeacherRepository; import org.dozer.DozerBeanMapper; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * Service handling teachers, providing a bridge between the controller and the model. * * @author <a href="http://xxyy.github.io/">xxyy</a> * @since 2016-02-14 */ @Service public class TeacherService { private final DozerBeanMapper dozerBeanMapper; private final TeacherRepository teacherRepository; @Autowired public TeacherService(DozerBeanMapper dozerBeanMapper, TeacherRepository teacherRepository) { this.dozerBeanMapper = dozerBeanMapper; this.teacherRepository = teacherRepository; } public Iterable<Teacher> getAllTeachers() { return teacherRepository.findAll(); } public Teacher getById(int id) { Teacher teacher = teacherRepository.findOne(id); if(teacher == null) { throw new TeacherNotFoundException(id); } return teacher; } public TeacherDto toDto(Teacher entity) { return dozerBeanMapper.map(entity, TeacherDto.class); } }
package li.l1t.tingo.service; import li.l1t.tingo.exception.TeacherNotFoundException; import li.l1t.tingo.model.Teacher; import li.l1t.tingo.model.dto.TeacherDto; import li.l1t.tingo.model.repo.TeacherRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * Service handling teachers, providing a bridge between the controller and the model. * * @author <a href="http://xxyy.github.io/">xxyy</a> * @since 2016-02-14 */ @Service public class TeacherService { @Autowired private TeacherRepository teacherRepository; public Iterable<Teacher> getAllTeachers() { return teacherRepository.findAll(); } public Teacher getById(int id) { Teacher teacher = teacherRepository.findOne(id); if(teacher == null) { throw new TeacherNotFoundException(id); } return teacher; } public TeacherDto toDto(Teacher entity) { TeacherDto dto = new TeacherDto(); dto.setId(entity.getId()); dto.setAbbreviation(entity.getAbbreviation()); dto.setName(entity.getName()); return dto; } }
Fix teacher ids not being mapped
Fix teacher ids not being mapped
Java
apache-2.0
xxyy/sic,xxyy/tingo,xxyy/sic,xxyy/tingo,xxyy/tingo,xxyy/sic
java
## Code Before: package li.l1t.tingo.service; import li.l1t.tingo.exception.TeacherNotFoundException; import li.l1t.tingo.model.Teacher; import li.l1t.tingo.model.dto.TeacherDto; import li.l1t.tingo.model.repo.TeacherRepository; import org.dozer.DozerBeanMapper; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * Service handling teachers, providing a bridge between the controller and the model. * * @author <a href="http://xxyy.github.io/">xxyy</a> * @since 2016-02-14 */ @Service public class TeacherService { private final DozerBeanMapper dozerBeanMapper; private final TeacherRepository teacherRepository; @Autowired public TeacherService(DozerBeanMapper dozerBeanMapper, TeacherRepository teacherRepository) { this.dozerBeanMapper = dozerBeanMapper; this.teacherRepository = teacherRepository; } public Iterable<Teacher> getAllTeachers() { return teacherRepository.findAll(); } public Teacher getById(int id) { Teacher teacher = teacherRepository.findOne(id); if(teacher == null) { throw new TeacherNotFoundException(id); } return teacher; } public TeacherDto toDto(Teacher entity) { return dozerBeanMapper.map(entity, TeacherDto.class); } } ## Instruction: Fix teacher ids not being mapped ## Code After: package li.l1t.tingo.service; import li.l1t.tingo.exception.TeacherNotFoundException; import li.l1t.tingo.model.Teacher; import li.l1t.tingo.model.dto.TeacherDto; import li.l1t.tingo.model.repo.TeacherRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * Service handling teachers, providing a bridge between the controller and the model. 
* * @author <a href="http://xxyy.github.io/">xxyy</a> * @since 2016-02-14 */ @Service public class TeacherService { @Autowired private TeacherRepository teacherRepository; public Iterable<Teacher> getAllTeachers() { return teacherRepository.findAll(); } public Teacher getById(int id) { Teacher teacher = teacherRepository.findOne(id); if(teacher == null) { throw new TeacherNotFoundException(id); } return teacher; } public TeacherDto toDto(Teacher entity) { TeacherDto dto = new TeacherDto(); dto.setId(entity.getId()); dto.setAbbreviation(entity.getAbbreviation()); dto.setName(entity.getName()); return dto; } }
... import li.l1t.tingo.model.Teacher; import li.l1t.tingo.model.dto.TeacherDto; import li.l1t.tingo.model.repo.TeacherRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; ... */ @Service public class TeacherService { @Autowired private TeacherRepository teacherRepository; public Iterable<Teacher> getAllTeachers() { return teacherRepository.findAll(); ... } public TeacherDto toDto(Teacher entity) { TeacherDto dto = new TeacherDto(); dto.setId(entity.getId()); dto.setAbbreviation(entity.getAbbreviation()); dto.setName(entity.getName()); return dto; } } ...
d00dc21fdb784defab7376b8aa0c45db94be5c97
src/gpu/vk/GrVkVulkan.h
src/gpu/vk/GrVkVulkan.h
/* * Copyright 2018 Google Inc. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef GrVkVulkan_DEFINED #define GrVkVulkan_DEFINED #include "SkTypes.h" #ifdef VULKAN_CORE_H_ #error "Skia's private vulkan header must be included before any other vulkan header." #endif #include "../../third_party/vulkan/vulkan/vulkan_core.h" #ifdef SK_BUILD_FOR_ANDROID #ifdef VULKAN_ANDROID_H_ #error "Skia's private vulkan android header must be included before any other vulkan header." #endif // This is needed to get android extensions for external memory #include "../../third_party/vulkan/vulkan/vulkan_android.h" #endif #endif
/* * Copyright 2018 Google Inc. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef GrVkVulkan_DEFINED #define GrVkVulkan_DEFINED #include "SkTypes.h" #ifdef VULKAN_CORE_H_ #error "Skia's private vulkan header must be included before any other vulkan header." #endif #include "../../../include/third_party/vulkan/vulkan/vulkan_core.h" #ifdef SK_BUILD_FOR_ANDROID #ifdef VULKAN_ANDROID_H_ #error "Skia's private vulkan android header must be included before any other vulkan header." #endif // This is needed to get android extensions for external memory #include "../../../include/third_party/vulkan/vulkan/vulkan_android.h" #endif #endif
Fix path to vulkan header.
Fix path to vulkan header. Bug: skia: Change-Id: I47cb8f67b378a51cefcb82864eda467ff6b48a7e Reviewed-on: https://skia-review.googlesource.com/c/176969 Commit-Queue: Greg Daniel <[email protected]> Commit-Queue: Mike Klein <[email protected]> Auto-Submit: Greg Daniel <[email protected]> Reviewed-by: Mike Klein <[email protected]>
C
bsd-3-clause
HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,google/skia,Hikari-no-Tenshi/android_external_skia,google/skia,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,rubenvb/skia,HalCanary/skia-hc,HalCanary/skia-hc,rubenvb/skia,HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,Hikari-no-Tenshi/android_external_skia,google/skia,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,HalCanary/skia-hc,rubenvb/skia,google/skia,rubenvb/skia,google/skia,aosp-mirror/platform_external_skia,rubenvb/skia,aosp-mirror/platform_external_skia,Hikari-no-Tenshi/android_external_skia,aosp-mirror/platform_external_skia,rubenvb/skia,aosp-mirror/platform_external_skia,rubenvb/skia,google/skia,Hikari-no-Tenshi/android_external_skia,rubenvb/skia,rubenvb/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia
c
## Code Before: /* * Copyright 2018 Google Inc. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef GrVkVulkan_DEFINED #define GrVkVulkan_DEFINED #include "SkTypes.h" #ifdef VULKAN_CORE_H_ #error "Skia's private vulkan header must be included before any other vulkan header." #endif #include "../../third_party/vulkan/vulkan/vulkan_core.h" #ifdef SK_BUILD_FOR_ANDROID #ifdef VULKAN_ANDROID_H_ #error "Skia's private vulkan android header must be included before any other vulkan header." #endif // This is needed to get android extensions for external memory #include "../../third_party/vulkan/vulkan/vulkan_android.h" #endif #endif ## Instruction: Fix path to vulkan header. Bug: skia: Change-Id: I47cb8f67b378a51cefcb82864eda467ff6b48a7e Reviewed-on: https://skia-review.googlesource.com/c/176969 Commit-Queue: Greg Daniel <[email protected]> Commit-Queue: Mike Klein <[email protected]> Auto-Submit: Greg Daniel <[email protected]> Reviewed-by: Mike Klein <[email protected]> ## Code After: /* * Copyright 2018 Google Inc. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef GrVkVulkan_DEFINED #define GrVkVulkan_DEFINED #include "SkTypes.h" #ifdef VULKAN_CORE_H_ #error "Skia's private vulkan header must be included before any other vulkan header." #endif #include "../../../include/third_party/vulkan/vulkan/vulkan_core.h" #ifdef SK_BUILD_FOR_ANDROID #ifdef VULKAN_ANDROID_H_ #error "Skia's private vulkan android header must be included before any other vulkan header." #endif // This is needed to get android extensions for external memory #include "../../../include/third_party/vulkan/vulkan/vulkan_android.h" #endif #endif
... #error "Skia's private vulkan header must be included before any other vulkan header." #endif #include "../../../include/third_party/vulkan/vulkan/vulkan_core.h" #ifdef SK_BUILD_FOR_ANDROID #ifdef VULKAN_ANDROID_H_ ... #error "Skia's private vulkan android header must be included before any other vulkan header." #endif // This is needed to get android extensions for external memory #include "../../../include/third_party/vulkan/vulkan/vulkan_android.h" #endif #endif ...
451e1a0909d474c787a356362b3b3cb06cd18682
beaform-core/src/main/java/beaform/events/FormulaCreatedEvent.java
beaform-core/src/main/java/beaform/events/FormulaCreatedEvent.java
package beaform.events; import java.text.SimpleDateFormat; import java.util.Date; import beaform.utilities.SystemTime; public class FormulaCreatedEvent implements Event { private static final String EVENT_TYPE = "FormulaCreated"; private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SS"); private final String name; private final long timestamp; public FormulaCreatedEvent(String name) { this.name = name; this.timestamp = SystemTime.getAsLong(); } @Override public String toEventString() { final String formattedTimeStamp = dateFormat.format(new Date(this.timestamp)); return "[" + formattedTimeStamp + "] " + EVENT_TYPE + " " + this.name; } }
package beaform.events; import java.text.SimpleDateFormat; import java.util.Date; import beaform.utilities.SystemTime; public class FormulaCreatedEvent implements Event { private static final String EVENT_TYPE = "FormulaCreated"; private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss.SS"; private final String name; private final long timestamp; public FormulaCreatedEvent(String name) { this.name = name; this.timestamp = SystemTime.getAsLong(); } @Override public String toEventString() { final SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); final String formattedTimeStamp = dateFormat.format(new Date(this.timestamp)); return "[" + formattedTimeStamp + "] " + EVENT_TYPE + " " + this.name; } }
Improve thread safety by not using a static instance of SimpleDateFormat
Improve thread safety by not using a static instance of SimpleDateFormat
Java
mit
stevenpost/beaform
java
## Code Before: package beaform.events; import java.text.SimpleDateFormat; import java.util.Date; import beaform.utilities.SystemTime; public class FormulaCreatedEvent implements Event { private static final String EVENT_TYPE = "FormulaCreated"; private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SS"); private final String name; private final long timestamp; public FormulaCreatedEvent(String name) { this.name = name; this.timestamp = SystemTime.getAsLong(); } @Override public String toEventString() { final String formattedTimeStamp = dateFormat.format(new Date(this.timestamp)); return "[" + formattedTimeStamp + "] " + EVENT_TYPE + " " + this.name; } } ## Instruction: Improve thread safety by not using a static instance of SimpleDateFormat ## Code After: package beaform.events; import java.text.SimpleDateFormat; import java.util.Date; import beaform.utilities.SystemTime; public class FormulaCreatedEvent implements Event { private static final String EVENT_TYPE = "FormulaCreated"; private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss.SS"; private final String name; private final long timestamp; public FormulaCreatedEvent(String name) { this.name = name; this.timestamp = SystemTime.getAsLong(); } @Override public String toEventString() { final SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); final String formattedTimeStamp = dateFormat.format(new Date(this.timestamp)); return "[" + formattedTimeStamp + "] " + EVENT_TYPE + " " + this.name; } }
// ... existing code ... public class FormulaCreatedEvent implements Event { private static final String EVENT_TYPE = "FormulaCreated"; private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss.SS"; private final String name; private final long timestamp; // ... modified code ... @Override public String toEventString() { final SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); final String formattedTimeStamp = dateFormat.format(new Date(this.timestamp)); return "[" + formattedTimeStamp + "] " + EVENT_TYPE + " " + this.name; } // ... rest of the code ...
d5c65f6ac2cdae3310f41efb9ab0a6d5cae63357
kopytka/managers.py
kopytka/managers.py
from django.db import models class PageQuerySet(models.QuerySet): def published(self): return self.filter(is_published=True)
from django.db import models from .transforms import SKeys class PageQuerySet(models.QuerySet): def published(self): return self.filter(is_published=True) def fragment_keys(self): return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
Add fragment_keys method to PageQuerySet
Add fragment_keys method to PageQuerySet
Python
mit
funkybob/kopytka,funkybob/kopytka,funkybob/kopytka
python
## Code Before: from django.db import models class PageQuerySet(models.QuerySet): def published(self): return self.filter(is_published=True) ## Instruction: Add fragment_keys method to PageQuerySet ## Code After: from django.db import models from .transforms import SKeys class PageQuerySet(models.QuerySet): def published(self): return self.filter(is_published=True) def fragment_keys(self): return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
// ... existing code ... from django.db import models from .transforms import SKeys class PageQuerySet(models.QuerySet): // ... modified code ... def published(self): return self.filter(is_published=True) def fragment_keys(self): return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True) // ... rest of the code ...
96d798685c53f4568edaaf990b0bbe8e2e10e24a
tests_tf/test_mnist_tutorial_jsma.py
tests_tf/test_mnist_tutorial_jsma.py
import unittest class TestMNISTTutorialJSMA(unittest.TestCase): def test_mnist_tutorial_jsma(self): from tutorials import mnist_tutorial_jsma # Run the MNIST tutorial on a dataset of reduced size # and disable visualization. jsma_tutorial_args = {'train_start': 0, 'train_end': 10000, 'test_start': 0, 'test_end': 1666, 'viz_enabled': False, 'source_samples': 1, 'nb_epochs': 2} report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args) print(report.clean_train_adv_eval) # Check accuracy values contained in the AccuracyReport object self.assertTrue(report.clean_train_clean_eval > 0.75) self.assertTrue(report.clean_train_adv_eval < 0.05) # There is no adversarial training in the JSMA tutorial self.assertTrue(report.adv_train_clean_eval == 0.) self.assertTrue(report.adv_train_adv_eval == 0.) if __name__ == '__main__': unittest.main()
import unittest class TestMNISTTutorialJSMA(unittest.TestCase): def test_mnist_tutorial_jsma(self): from tutorials import mnist_tutorial_jsma # Run the MNIST tutorial on a dataset of reduced size # and disable visualization. jsma_tutorial_args = {'train_start': 0, 'train_end': 1000, 'test_start': 0, 'test_end': 1666, 'viz_enabled': False, 'source_samples': 1, 'nb_epochs': 2} report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args) # Check accuracy values contained in the AccuracyReport object self.assertTrue(report.clean_train_clean_eval > 0.75) self.assertTrue(report.clean_train_adv_eval < 0.05) # There is no adversarial training in the JSMA tutorial self.assertTrue(report.adv_train_clean_eval == 0.) self.assertTrue(report.adv_train_adv_eval == 0.) if __name__ == '__main__': unittest.main()
Update JSMA test tutorial constant
Update JSMA test tutorial constant
Python
mit
cleverhans-lab/cleverhans,cleverhans-lab/cleverhans,openai/cleverhans,carlini/cleverhans,cihangxie/cleverhans,cleverhans-lab/cleverhans,carlini/cleverhans,fartashf/cleverhans
python
## Code Before: import unittest class TestMNISTTutorialJSMA(unittest.TestCase): def test_mnist_tutorial_jsma(self): from tutorials import mnist_tutorial_jsma # Run the MNIST tutorial on a dataset of reduced size # and disable visualization. jsma_tutorial_args = {'train_start': 0, 'train_end': 10000, 'test_start': 0, 'test_end': 1666, 'viz_enabled': False, 'source_samples': 1, 'nb_epochs': 2} report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args) print(report.clean_train_adv_eval) # Check accuracy values contained in the AccuracyReport object self.assertTrue(report.clean_train_clean_eval > 0.75) self.assertTrue(report.clean_train_adv_eval < 0.05) # There is no adversarial training in the JSMA tutorial self.assertTrue(report.adv_train_clean_eval == 0.) self.assertTrue(report.adv_train_adv_eval == 0.) if __name__ == '__main__': unittest.main() ## Instruction: Update JSMA test tutorial constant ## Code After: import unittest class TestMNISTTutorialJSMA(unittest.TestCase): def test_mnist_tutorial_jsma(self): from tutorials import mnist_tutorial_jsma # Run the MNIST tutorial on a dataset of reduced size # and disable visualization. jsma_tutorial_args = {'train_start': 0, 'train_end': 1000, 'test_start': 0, 'test_end': 1666, 'viz_enabled': False, 'source_samples': 1, 'nb_epochs': 2} report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args) # Check accuracy values contained in the AccuracyReport object self.assertTrue(report.clean_train_clean_eval > 0.75) self.assertTrue(report.clean_train_adv_eval < 0.05) # There is no adversarial training in the JSMA tutorial self.assertTrue(report.adv_train_clean_eval == 0.) self.assertTrue(report.adv_train_adv_eval == 0.) if __name__ == '__main__': unittest.main()
// ... existing code ... # Run the MNIST tutorial on a dataset of reduced size # and disable visualization. jsma_tutorial_args = {'train_start': 0, 'train_end': 1000, 'test_start': 0, 'test_end': 1666, 'viz_enabled': False, // ... modified code ... 'nb_epochs': 2} report = mnist_tutorial_jsma.mnist_tutorial_jsma(**jsma_tutorial_args) # Check accuracy values contained in the AccuracyReport object self.assertTrue(report.clean_train_clean_eval > 0.75) self.assertTrue(report.clean_train_adv_eval < 0.05) // ... rest of the code ...
b62c8c905cdd332a0073ce462be3e5c5b17b282d
api/webview/views.py
api/webview/views.py
from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListCreateAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListCreateAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
Make the view List only remove Create
Make the view List only remove Create
Python
apache-2.0
erinspace/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,felliott/scrapi
python
## Code Before: from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListCreateAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListCreateAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. 
""" try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data) ## Instruction: Make the view List only remove Create ## Code After: from rest_framework import generics from rest_framework import permissions from rest_framework.response import Response from rest_framework.decorators import api_view from django.views.decorators.clickjacking import xframe_options_exempt from api.webview.models import Document from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListAPIView): """ List all documents in the SHARE API """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return all documents """ return Document.objects.all() class DocumentsFromSource(generics.ListAPIView): """ List all documents from a particular source """ serializer_class = DocumentSerializer permission_classes = (permissions.IsAuthenticatedOrReadOnly,) def perform_create(self, serializer): serializer.save(source=self.request.user) def get_queryset(self): """ Return queryset based on source """ return Document.objects.filter(source=self.kwargs['source']) @api_view(['GET']) @xframe_options_exempt def document_detail(request, source, docID): """ Retrieve one particular document. """ try: all_sources = Document.objects.filter(source=source) document = all_sources.get(docID=docID) except Document.DoesNotExist: return Response(status=404) serializer = DocumentSerializer(document) return Response(serializer.data)
# ... existing code ... from api.webview.serializers import DocumentSerializer class DocumentList(generics.ListAPIView): """ List all documents in the SHARE API """ # ... modified code ... return Document.objects.all() class DocumentsFromSource(generics.ListAPIView): """ List all documents from a particular source """ # ... rest of the code ...
5c677c11b35dcb49b9b33807685284bfe9d86338
xgds_map_server/urls.py
xgds_map_server/urls.py
from django.conf.urls.defaults import * from xgds_map_server import settings from xgds_map_server.views import * urlpatterns = patterns( '', (r'^$', getMapListPage, {'readOnly': True}, 'xgds_map_server_index'), # Map server urls # HTML list of maps with description and links to individual maps, and a link to the kml feed (r'^list/', getMapListPage, {'readOnly': True}, 'mapList'), # This URL should receive a static files (r'^data/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.DATA_URL + settings.XGDS_MAP_SERVER_DATA_SUBDIR, 'show_indexes' : True, 'readOnly': True}, 'xgds_map_server_static'), # By default if you just load the app you should see the list (r'^feed/(?P<feedname>.*)', getMapFeed, {'readOnly': True, 'challenge': settings.SECURITY_GOOGLE_EARTH_CHALLENGE}, 'xgds_map_server_feed'), )
from django.conf.urls.defaults import * from xgds_map_server import settings from xgds_map_server.views import * urlpatterns = patterns( '', (r'^$', getMapListPage, {'readOnly': True}, 'xgds_map_server_index'), # Map server urls # HTML list of maps with description and links to individual maps, and a link to the kml feed (r'^list/', getMapListPage, {'readOnly': True}, 'mapList'), # This URL should receive a static files (r'^data/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.DATA_URL + settings.XGDS_MAP_SERVER_DATA_SUBDIR, 'show_indexes' : True, 'readOnly': True}, 'xgds_map_server_static'), # By default if you just load the app you should see the list (r'^feed/(?P<feedname>.*)', getMapFeed, {'readOnly': True, 'loginRequired': False}, 'xgds_map_server_feed'), )
Tweak login required and auth settings to work with C3
Tweak login required and auth settings to work with C3
Python
apache-2.0
xgds/xgds_map_server,xgds/xgds_map_server,xgds/xgds_map_server
python
## Code Before: from django.conf.urls.defaults import * from xgds_map_server import settings from xgds_map_server.views import * urlpatterns = patterns( '', (r'^$', getMapListPage, {'readOnly': True}, 'xgds_map_server_index'), # Map server urls # HTML list of maps with description and links to individual maps, and a link to the kml feed (r'^list/', getMapListPage, {'readOnly': True}, 'mapList'), # This URL should receive a static files (r'^data/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.DATA_URL + settings.XGDS_MAP_SERVER_DATA_SUBDIR, 'show_indexes' : True, 'readOnly': True}, 'xgds_map_server_static'), # By default if you just load the app you should see the list (r'^feed/(?P<feedname>.*)', getMapFeed, {'readOnly': True, 'challenge': settings.SECURITY_GOOGLE_EARTH_CHALLENGE}, 'xgds_map_server_feed'), ) ## Instruction: Tweak login required and auth settings to work with C3 ## Code After: from django.conf.urls.defaults import * from xgds_map_server import settings from xgds_map_server.views import * urlpatterns = patterns( '', (r'^$', getMapListPage, {'readOnly': True}, 'xgds_map_server_index'), # Map server urls # HTML list of maps with description and links to individual maps, and a link to the kml feed (r'^list/', getMapListPage, {'readOnly': True}, 'mapList'), # This URL should receive a static files (r'^data/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.DATA_URL + settings.XGDS_MAP_SERVER_DATA_SUBDIR, 'show_indexes' : True, 'readOnly': True}, 'xgds_map_server_static'), # By default if you just load the app you should see the list (r'^feed/(?P<feedname>.*)', getMapFeed, {'readOnly': True, 'loginRequired': False}, 'xgds_map_server_feed'), )
# ... existing code ... # By default if you just load the app you should see the list (r'^feed/(?P<feedname>.*)', getMapFeed, {'readOnly': True, 'loginRequired': False}, 'xgds_map_server_feed'), ) # ... rest of the code ...
58701c0d750714f8ded53627b0f8c22f256376c6
setup.py
setup.py
from setuptools import setup, find_packages setup( name='basicdb', version='0.1', description='Basic database service', long_description=open('README.rst', 'r').read(), author='Soren Hansen', author_email='[email protected]', url='http://github.com/sorenh/basicdb', packages=find_packages(), include_package_data=True, license='Apache 2.0', keywords='basicdb simpledb')
from setuptools import setup, find_packages import pkg_resources setup( name='basicdb', version='0.1', description='Basic database service', long_description=pkg_resources.resource_string(__name__, "README.rst"), author='Soren Hansen', author_email='[email protected]', url='http://github.com/sorenh/basicdb', packages=find_packages(), include_package_data=True, license='Apache 2.0', keywords='basicdb simpledb')
Use pkg_resources to read README.rst
Use pkg_resources to read README.rst
Python
apache-2.0
JioCloud/basicdb,varunarya10/basicdb,sorenh/basicdb
python
## Code Before: from setuptools import setup, find_packages setup( name='basicdb', version='0.1', description='Basic database service', long_description=open('README.rst', 'r').read(), author='Soren Hansen', author_email='[email protected]', url='http://github.com/sorenh/basicdb', packages=find_packages(), include_package_data=True, license='Apache 2.0', keywords='basicdb simpledb') ## Instruction: Use pkg_resources to read README.rst ## Code After: from setuptools import setup, find_packages import pkg_resources setup( name='basicdb', version='0.1', description='Basic database service', long_description=pkg_resources.resource_string(__name__, "README.rst"), author='Soren Hansen', author_email='[email protected]', url='http://github.com/sorenh/basicdb', packages=find_packages(), include_package_data=True, license='Apache 2.0', keywords='basicdb simpledb')
// ... existing code ... from setuptools import setup, find_packages import pkg_resources setup( name='basicdb', version='0.1', description='Basic database service', long_description=pkg_resources.resource_string(__name__, "README.rst"), author='Soren Hansen', author_email='[email protected]', url='http://github.com/sorenh/basicdb', // ... rest of the code ...
a6e2c0fc837b17321e2979cb12ba2d0e69603eac
orderedmodel/__init__.py
orderedmodel/__init__.py
__all__ = ['OrderedModel', 'OrderedModelAdmin'] from models import OrderedModel from admin import OrderedModelAdmin
from .models import OrderedModel from .admin import OrderedModelAdmin __all__ = ['OrderedModel', 'OrderedModelAdmin'] try: from django.conf import settings except ImportError: pass else: if 'mptt' in settings.INSTALLED_APPS: from .mptt_models import OrderableMPTTModel from .mptt_admin import OrderedMPTTModelAdmin __all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
Make it easy importing of OrderableMPTTModel and OrderedMPTTModelAdmin in from orderedmodel module
Make it easy importing of OrderableMPTTModel and OrderedMPTTModelAdmin in from orderedmodel module
Python
bsd-3-clause
MagicSolutions/django-orderedmodel,MagicSolutions/django-orderedmodel
python
## Code Before: __all__ = ['OrderedModel', 'OrderedModelAdmin'] from models import OrderedModel from admin import OrderedModelAdmin ## Instruction: Make it easy importing of OrderableMPTTModel and OrderedMPTTModelAdmin in from orderedmodel module ## Code After: from .models import OrderedModel from .admin import OrderedModelAdmin __all__ = ['OrderedModel', 'OrderedModelAdmin'] try: from django.conf import settings except ImportError: pass else: if 'mptt' in settings.INSTALLED_APPS: from .mptt_models import OrderableMPTTModel from .mptt_admin import OrderedMPTTModelAdmin __all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
// ... existing code ... from .models import OrderedModel from .admin import OrderedModelAdmin __all__ = ['OrderedModel', 'OrderedModelAdmin'] try: from django.conf import settings except ImportError: pass else: if 'mptt' in settings.INSTALLED_APPS: from .mptt_models import OrderableMPTTModel from .mptt_admin import OrderedMPTTModelAdmin __all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin'] // ... rest of the code ...
d6a67a94cacab93463f2a15fc5d2a2fadae2ad83
site/tests/test_unittest.py
site/tests/test_unittest.py
import unittest class IntegerArithmenticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) unittest.main(exit=False)
import unittest class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) unittest.TextTestRunner(verbosity=0).run(suite)
Change unittest test in test suite : it is not run in module __main__
Change unittest test in test suite : it is not run in module __main__
Python
bsd-3-clause
Hasimir/brython,olemis/brython,JohnDenker/brython,firmlyjin/brython,Isendir/brython,jonathanverner/brython,Mozhuowen/brython,olemis/brython,brython-dev/brython,Lh4cKg/brython,Isendir/brython,kevinmel2000/brython,amrdraz/brython,jonathanverner/brython,molebot/brython,Mozhuowen/brython,jonathanverner/brython,JohnDenker/brython,olemis/brython,jonathanverner/brython,Hasimir/brython,kikocorreoso/brython,brython-dev/brython,Hasimir/brython,firmlyjin/brython,kevinmel2000/brython,Lh4cKg/brython,molebot/brython,amrdraz/brython,rubyinhell/brython,kikocorreoso/brython,Mozhuowen/brython,Isendir/brython,firmlyjin/brython,kevinmel2000/brython,Mozhuowen/brython,firmlyjin/brython,JohnDenker/brython,olemis/brython,molebot/brython,amrdraz/brython,brython-dev/brython,Isendir/brython,JohnDenker/brython,rubyinhell/brython,kikocorreoso/brython,firmlyjin/brython,Hasimir/brython,kevinmel2000/brython,molebot/brython,Lh4cKg/brython,rubyinhell/brython,rubyinhell/brython,amrdraz/brython,Lh4cKg/brython
python
## Code Before: import unittest class IntegerArithmenticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) unittest.main(exit=False) ## Instruction: Change unittest test in test suite : it is not run in module __main__ ## Code After: import unittest class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) def testMultiply(self): self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) unittest.TextTestRunner(verbosity=0).run(suite)
// ... existing code ... import unittest class IntegerArithmeticTestCase(unittest.TestCase): def testAdd(self): ## test method names begin 'test*' self.assertEqual((1 + 2), 3) self.assertEqual(0 + 1, 1) // ... modified code ... self.assertEqual((0 * 10), 0) self.assertEqual((5 * 8), 40) suite = unittest.TestLoader().loadTestsFromTestCase(IntegerArithmeticTestCase) unittest.TextTestRunner(verbosity=0).run(suite) // ... rest of the code ...
278b17859e4ad7464098a715777fcb755acf258c
doTranscode.py
doTranscode.py
import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile() inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file
Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file
Python
isc
jeffayle/Transcode
python
## Code Before: import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile() inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ ## Instruction: Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file ## Code After: import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
# ... existing code ... #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) if not inF_real: return False # ... rest of the code ...
4b3919cc92218092fa5fafd8b9e6f6826dca9817
src/main/java/com/parquet/Test.java
src/main/java/com/parquet/Test.java
package com.parquet; import com.richstonedt.nokia_api.peopleflow.Record; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.api.java.function.MapPartitionsFunction; import org.apache.spark.sql.Encoder; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; import java.util.Iterator; /** * Created by yangpy on 2017/8/7. */ public class Test { public static void main(String[] args) { SparkSession spark = SparkSession .builder() .appName("Java Spark SQL basic example") .config("spark.some.config.option", "some-value") .master("local[4]") .getOrCreate(); Encoder<Record> recordEncoder = Encoders.bean(Record.class); spark.read().textFile("D:\\data.json") .mapPartitions(new MapPartitionsFunction<String, Record>() { @Override public Iterator<Record> call(Iterator<String> iterator) throws Exception { return null; } }, recordEncoder) .write() .mode(SaveMode.Overwrite) .parquet("abc.parquet"); } }
package com.parquet; import com.richstonedt.nokia_api.peopleflow.Record; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.api.java.function.MapPartitionsFunction; import org.apache.spark.sql.Encoder; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; import java.util.Iterator; /** * Created by yangpy on 2017/8/7. */ public class Test { public static void main(String[] args) { SparkSession spark = SparkSession .builder() .appName("Java Spark SQL basic example") .config("spark.some.config.option", "some-value") .master("local[4]") .getOrCreate(); Encoder<Record> recordEncoder = Encoders.bean(Record.class); spark.read().textFile("/highway/demodata/parquet/data.json") .mapPartitions(new MapPartitionsFunction<String, Record>() { @Override public Iterator<Record> call(Iterator<String> iterator) throws Exception { return null; } }, recordEncoder) .write() .mode(SaveMode.Overwrite) .parquet("abc.parquet"); } }
Modify data source to HDFS
Modify data source to HDFS
Java
apache-2.0
ypynetboy/SparkDemo
java
## Code Before: package com.parquet; import com.richstonedt.nokia_api.peopleflow.Record; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.api.java.function.MapPartitionsFunction; import org.apache.spark.sql.Encoder; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; import java.util.Iterator; /** * Created by yangpy on 2017/8/7. */ public class Test { public static void main(String[] args) { SparkSession spark = SparkSession .builder() .appName("Java Spark SQL basic example") .config("spark.some.config.option", "some-value") .master("local[4]") .getOrCreate(); Encoder<Record> recordEncoder = Encoders.bean(Record.class); spark.read().textFile("D:\\data.json") .mapPartitions(new MapPartitionsFunction<String, Record>() { @Override public Iterator<Record> call(Iterator<String> iterator) throws Exception { return null; } }, recordEncoder) .write() .mode(SaveMode.Overwrite) .parquet("abc.parquet"); } } ## Instruction: Modify data source to HDFS ## Code After: package com.parquet; import com.richstonedt.nokia_api.peopleflow.Record; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.api.java.function.MapPartitionsFunction; import org.apache.spark.sql.Encoder; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; import java.util.Iterator; /** * Created by yangpy on 2017/8/7. 
*/ public class Test { public static void main(String[] args) { SparkSession spark = SparkSession .builder() .appName("Java Spark SQL basic example") .config("spark.some.config.option", "some-value") .master("local[4]") .getOrCreate(); Encoder<Record> recordEncoder = Encoders.bean(Record.class); spark.read().textFile("/highway/demodata/parquet/data.json") .mapPartitions(new MapPartitionsFunction<String, Record>() { @Override public Iterator<Record> call(Iterator<String> iterator) throws Exception { return null; } }, recordEncoder) .write() .mode(SaveMode.Overwrite) .parquet("abc.parquet"); } }
... .getOrCreate(); Encoder<Record> recordEncoder = Encoders.bean(Record.class); spark.read().textFile("/highway/demodata/parquet/data.json") .mapPartitions(new MapPartitionsFunction<String, Record>() { @Override public Iterator<Record> call(Iterator<String> iterator) throws Exception { ...
724338c55d0af6d38a949b58a90ae200849247f4
cyinterval/test/test_interval_set.py
cyinterval/test/test_interval_set.py
from cyinterval.cyinterval import Interval, IntervalSet from nose.tools import assert_equal def test_interval_set_construction(): interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.)) assert_equal(interval_set.intervals[0], Interval(0.,1.)) assert_equal(interval_set.intervals[1], Interval(2.,3.)) if __name__ == '__main__': import sys import nose # This code will run the test in this file.' module_name = sys.modules[__name__].__file__ result = nose.run(argv=[sys.argv[0], module_name, '-s', '-v'])
from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet from nose.tools import assert_equal, assert_is def test_float_interval_set_construction(): interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.)) assert_equal(interval_set.intervals[0], Interval(0.,1.)) assert_equal(interval_set.intervals[1], Interval(2.,3.)) assert_is(type(interval_set), FloatIntervalSet) if __name__ == '__main__': import sys import nose # This code will run the test in this file.' module_name = sys.modules[__name__].__file__ result = nose.run(argv=[sys.argv[0], module_name, '-s', '-v'])
Test type of IntervalSet factory output
Test type of IntervalSet factory output
Python
mit
jcrudy/cyinterval
python
## Code Before: from cyinterval.cyinterval import Interval, IntervalSet from nose.tools import assert_equal def test_interval_set_construction(): interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.)) assert_equal(interval_set.intervals[0], Interval(0.,1.)) assert_equal(interval_set.intervals[1], Interval(2.,3.)) if __name__ == '__main__': import sys import nose # This code will run the test in this file.' module_name = sys.modules[__name__].__file__ result = nose.run(argv=[sys.argv[0], module_name, '-s', '-v']) ## Instruction: Test type of IntervalSet factory output ## Code After: from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet from nose.tools import assert_equal, assert_is def test_float_interval_set_construction(): interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.)) assert_equal(interval_set.intervals[0], Interval(0.,1.)) assert_equal(interval_set.intervals[1], Interval(2.,3.)) assert_is(type(interval_set), FloatIntervalSet) if __name__ == '__main__': import sys import nose # This code will run the test in this file.' module_name = sys.modules[__name__].__file__ result = nose.run(argv=[sys.argv[0], module_name, '-s', '-v'])
// ... existing code ... from cyinterval.cyinterval import Interval, IntervalSet, FloatIntervalSet from nose.tools import assert_equal, assert_is def test_float_interval_set_construction(): interval_set = IntervalSet(Interval(0.,1.), Interval(2.,3.)) assert_equal(interval_set.intervals[0], Interval(0.,1.)) assert_equal(interval_set.intervals[1], Interval(2.,3.)) assert_is(type(interval_set), FloatIntervalSet) if __name__ == '__main__': import sys // ... rest of the code ...
bfc0ce1298b9fe7a640dc31c6e5729d1c6360945
langs/pystartup.py
langs/pystartup.py
import atexit import os import readline import rlcompleter historyPath = os.path.expanduser("~/.pyhistory") def save_history(path=historyPath): import readline readline.write_history_file(path) if os.path.exists(historyPath): readline.read_history_file(historyPath) readline.parse_and_bind("tab: complete") readline.parse_and_bind(r"\C-a: beginning-of-line") readline.parse_and_bind(r"\C-e: end-of-line") atexit.register(save_history) del os, atexit, readline, rlcompleter, save_history, historyPath # vim: ft=python
import atexit import os import readline import rlcompleter historyPath = os.path.expanduser("~/.pyhistory") def save_history(path=historyPath): import readline readline.write_history_file(path) if os.path.exists(historyPath): readline.read_history_file(historyPath) try: import __builtin__ except ImportError: import builtins as __builtin__ __builtin__.true = True __builtin__.false = False __builtin__.null = None readline.parse_and_bind("tab: complete") readline.parse_and_bind(r"\C-a: beginning-of-line") readline.parse_and_bind(r"\C-e: end-of-line") atexit.register(save_history) del os, atexit, readline, rlcompleter, save_history, historyPath # vim: ft=python
Add json true/false/none in python repl
Add json true/false/none in python repl https://mobile.twitter.com/mitsuhiko/status/1229385843585974272/photo/2
Python
mit
keith/dotfiles,keith/dotfiles,keith/dotfiles,keith/dotfiles,keith/dotfiles,keith/dotfiles
python
## Code Before: import atexit import os import readline import rlcompleter historyPath = os.path.expanduser("~/.pyhistory") def save_history(path=historyPath): import readline readline.write_history_file(path) if os.path.exists(historyPath): readline.read_history_file(historyPath) readline.parse_and_bind("tab: complete") readline.parse_and_bind(r"\C-a: beginning-of-line") readline.parse_and_bind(r"\C-e: end-of-line") atexit.register(save_history) del os, atexit, readline, rlcompleter, save_history, historyPath # vim: ft=python ## Instruction: Add json true/false/none in python repl https://mobile.twitter.com/mitsuhiko/status/1229385843585974272/photo/2 ## Code After: import atexit import os import readline import rlcompleter historyPath = os.path.expanduser("~/.pyhistory") def save_history(path=historyPath): import readline readline.write_history_file(path) if os.path.exists(historyPath): readline.read_history_file(historyPath) try: import __builtin__ except ImportError: import builtins as __builtin__ __builtin__.true = True __builtin__.false = False __builtin__.null = None readline.parse_and_bind("tab: complete") readline.parse_and_bind(r"\C-a: beginning-of-line") readline.parse_and_bind(r"\C-e: end-of-line") atexit.register(save_history) del os, atexit, readline, rlcompleter, save_history, historyPath # vim: ft=python
... if os.path.exists(historyPath): readline.read_history_file(historyPath) try: import __builtin__ except ImportError: import builtins as __builtin__ __builtin__.true = True __builtin__.false = False __builtin__.null = None readline.parse_and_bind("tab: complete") readline.parse_and_bind(r"\C-a: beginning-of-line") readline.parse_and_bind(r"\C-e: end-of-line") ...
b717696b5cff69e3586e06c399be7d06c057e503
nova/tests/fake_utils.py
nova/tests/fake_utils.py
"""This modules stubs out functions in nova.utils.""" from nova import utils def stub_out_utils_spawn_n(stubs): """Stubs out spawn_n with a blocking version. This aids testing async processes by blocking until they're done. """ def no_spawn(func, *args, **kwargs): return func(*args, **kwargs) stubs.Set(utils, 'spawn_n', no_spawn)
"""This modules stubs out functions in nova.utils.""" from nova import utils def stub_out_utils_spawn_n(stubs): """Stubs out spawn_n with a blocking version. This aids testing async processes by blocking until they're done. """ def no_spawn(func, *args, **kwargs): try: return func(*args, **kwargs) except Exception: # NOTE(danms): This is supposed to simulate spawning # of a thread, which would run separate from the parent, # and die silently on error. If we don't catch and discard # any exceptions here, we're not honoring the usual # behavior. pass stubs.Set(utils, 'spawn_n', no_spawn)
Make spawn_n() stub properly ignore errors in the child thread work
Make spawn_n() stub properly ignore errors in the child thread work When we call spawn_n() normally, we fork off a thread that can run or die on its own, without affecting the parent. In unit tests, we stub this out to be a synchronous call, but we allow any exceptions that occur in that work to bubble up to the caller. This is not normal behavior and thus we should discard any such exceptions in order to mimic actual behavior of a child thread. Change-Id: I35ab21e9525aa76cced797436daa0b99a4fa99f2 Related-bug: #1349147
Python
apache-2.0
barnsnake351/nova,dawnpower/nova,alvarolopez/nova,JioCloud/nova_test_latest,joker946/nova,apporc/nova,cyx1231st/nova,dims/nova,klmitch/nova,mgagne/nova,openstack/nova,orbitfp7/nova,phenoxim/nova,rajalokan/nova,Stavitsky/nova,akash1808/nova_test_latest,apporc/nova,devendermishrajio/nova_test_latest,noironetworks/nova,maelnor/nova,j-carpentier/nova,dims/nova,ted-gould/nova,tangfeixiong/nova,zaina/nova,rajalokan/nova,rahulunair/nova,takeshineshiro/nova,LoHChina/nova,cloudbase/nova,fnordahl/nova,mikalstill/nova,fnordahl/nova,Metaswitch/calico-nova,projectcalico/calico-nova,angdraug/nova,tianweizhang/nova,rahulunair/nova,BeyondTheClouds/nova,scripnichenko/nova,belmiromoreira/nova,iuliat/nova,berrange/nova,tianweizhang/nova,devendermishrajio/nova,petrutlucian94/nova,vmturbo/nova,blueboxgroup/nova,badock/nova,adelina-t/nova,virtualopensystems/nova,nikesh-mahalka/nova,Stavitsky/nova,LoHChina/nova,zhimin711/nova,alexandrucoman/vbox-nova-driver,petrutlucian94/nova,klmitch/nova,angdraug/nova,CEG-FYP-OpenStack/scheduler,berrange/nova,tianweizhang/nova,rahulunair/nova,BeyondTheClouds/nova,scripnichenko/nova,belmiromoreira/nova,iuliat/nova,j-carpentier/nova,whitepages/nova,akash1808/nova,jianghuaw/nova,adelina-t/nova,vmturbo/nova,openstack/nova,raildo/nova,cernops/nova,JianyuWang/nova,orbitfp7/nova,devendermishrajio/nova_test_latest,felixma/nova,mahak/nova,Metaswitch/calico-nova,Tehsmash/nova,TwinkleChawla/nova,vladikr/nova_drafts,gooddata/openstack-nova,ruslanloman/nova,tealover/nova,BeyondTheClouds/nova,zaina/nova,watonyweng/nova,bigswitch/nova,MountainWei/nova,thomasem/nova,kimjaejoong/nova,bgxavier/nova,Juniper/nova,cloudbase/nova-virtualbox,alvarolopez/nova,TwinkleChawla/nova,raildo/nova,edulramirez/nova,ted-gould/nova,Juniper/nova,zhimin711/nova,saleemjaveds/https-github.com-openstack-nova,alexandrucoman/vbox-nova-driver,zzicewind/nova,tudorvio/nova,varunarya10/nova_test_latest,klmitch/nova,maelnor/nova,iuliat/nova,j-carpentier/nova,whitepages/nova,akash1808/nova,jianghuaw/nova,adelina-t/nova,vmturbo/nova,openstack/nova,raildo/nova,cernops/nova,JianyuWang/nova,mahak/nova,eayunstack/nova,cernops/nova,mandeepdhami/nova,phenoxim/nova,rajalokan/nova,Yusuke1987/openstack_template,sebrandon1/nova,JioCloud/nova,akash1808/nova_test_latest,affo/nova,joker946/nova,kimjaejoong/nova,Juniper/nova,MountainWei/nova,double12gzh/nova,mgagne/nova,mandeepdhami/nova,yosshy/nova,watonyweng/nova,cernops/nova,yatinkumbhare/openstack-nova,cyx1231st/nova,rajalokan/nova,zzicewind/nova,CCI-MOC/nova,yatinkumbhare/openstack-nova,BeyondTheClouds/nova,bigswitch/nova,jianghuaw/nova,redhat-openstack/nova,vladikr/nova_drafts,Tehsmash/nova,cloudbase/nova,cernops/nova,yosshy/nova,orbitfp7/nova,devendermishrajio/nova_test_latest,felixma/nova,mahak/nova,Metaswitch/calico-nova,Tehsmash/nova,TwinkleChawla/nova,vladikr/nova_drafts,gooddata/openstack-nova,ruslanloman/nova,tealover/nova,BeyondTheClouds/nova,zaina/nova,watonyweng/nova,bigswitch/nova,MountainWei/nova,thomasem/nova,kimjaejoong/nova,bgxavier/nova,Juniper/nova,cloudbase/nova-virtualbox,alvarolopez/nova,TwinkleChawla/nova,raildo/nova,edulramirez/nova,ted-gould/nova,Juniper/nova,zhimin711/nova,saleemjaveds/https-github.com-openstack-nova,alexandrucoman/vbox-nova-driver,zzicewind/nova,tudorvio/nova,varunarya10/nova_test_latest,klmitch/nova,maelnor/nova,hanlind/nova,takeshineshiro/nova,silenceli/nova,gooddata/openstack-nova,NeCTAR-RC/nova,CloudServer/nova,eonpatapon/nova,shail2810/nova,Yusuke1987/openstack_template,CCI-MOC/nova,badock/nova,rahulunair/nova,shail2810/nova,mikalstill/nova,JianyuWang/nova,isyippee/nova,hanlind/nova,jianghuaw/nova,tudorvio/nova,varunarya10/nova_test_latest,klmitch/nova,maelnor/nova,iuliat/nova,j-carpentier/nova,whitepages/nova,akash1808/nova,jianghuaw/nova,adelina-t/nova,vmturbo/nova,raildo/nova,mikalstill/nova,JianyuWang/nova,mahak/nova
python
## Code Before: """This modules stubs out functions in nova.utils.""" from nova import utils def stub_out_utils_spawn_n(stubs): """Stubs out spawn_n with a blocking version. This aids testing async processes by blocking until they're done. """ def no_spawn(func, *args, **kwargs): return func(*args, **kwargs) stubs.Set(utils, 'spawn_n', no_spawn) ## Instruction: Make spawn_n() stub properly ignore errors in the child thread work When we call spawn_n() normally, we fork off a thread that can run or die on its own, without affecting the parent. In unit tests, we stub this out to be a synchronous call, but we allow any exceptions that occur in that work to bubble up to the caller. This is not normal behavior and thus we should discard any such exceptions in order to mimic actual behavior of a child thread. Change-Id: I35ab21e9525aa76cced797436daa0b99a4fa99f2 Related-bug: #1349147 ## Code After: """This modules stubs out functions in nova.utils.""" from nova import utils def stub_out_utils_spawn_n(stubs): """Stubs out spawn_n with a blocking version. This aids testing async processes by blocking until they're done. """ def no_spawn(func, *args, **kwargs): try: return func(*args, **kwargs) except Exception: # NOTE(danms): This is supposed to simulate spawning # of a thread, which would run separate from the parent, # and die silently on error. If we don't catch and discard # any exceptions here, we're not honoring the usual # behavior. pass stubs.Set(utils, 'spawn_n', no_spawn)
... This aids testing async processes by blocking until they're done. """ def no_spawn(func, *args, **kwargs): try: return func(*args, **kwargs) except Exception: # NOTE(danms): This is supposed to simulate spawning # of a thread, which would run separate from the parent, # and die silently on error. If we don't catch and discard # any exceptions here, we're not honoring the usual # behavior. pass stubs.Set(utils, 'spawn_n', no_spawn) ...
9c945162dfb60481c9f5d39c5e42617b030263a9
mailgun/models.py
mailgun/models.py
import api import db from utils import parse_timestamp import hashlib import json def download_logs(): """ Download mailgun logs and store them in the database """ logs = [] skip = 0 # Fetch all unsaved logs and add them to a LIFO queue while True: print("fecthing logs starting at {}".format(skip)) for log in api.get_logs(limit=100, skip=skip)['items']: log_data = json.dumps(log) log_hash = hashlib.sha256(log_data).hexdigest() if db.MailgunLog.objects.filter(log_hash=log_hash).exists(): break else: logs[:0] = [(log_hash, log_data, parse_timestamp(log['date_created']] else: break skip += 100 # take items from LIFO queue and save to db for log_hash, data, timestamp in logs: db.MailgunLog( log_hash=log_hash, data=data, timestamp=timestamp ).save()
import api import db from utils import parse_timestamp from django.db import transaction from collections import OrderedDict import hashlib import json @transaction.commit_manually def download_logs(): """ Download mailgun logs and store them in the database """ # use ordered dict to protect against new logs arriving while downloading logs logs = OrderedDict() skip = 0 # Fetch all unsaved logs and add them to a LIFO queue fetch_more = True while fetch_more: print("fecthing logs skip={}".format(skip)) logs_tmp = api.get_logs(limit=1000, skip=skip)['items'] if len(logs_tmp) == 0: break for log in logs_tmp: log_data = json.dumps(log) log_hash = hashlib.sha256(log_data).hexdigest() if db.MailgunLog.objects.filter(log_hash=log_hash).exists(): fetch_more = False break else: logs[log_hash] = (log_hash, log_data, parse_timestamp(log['created_at'])) skip += 1000 # take items from LIFO queue and save to db print("Saving {0} logs to database".format(len(logs))) for i, (log_hash, data, timestamp) in enumerate(logs.values()): db.MailgunLog( log_hash=log_hash, data=data, timestamp=timestamp ).save() if (i+1) % 100 == 0: transaction.commit() transaction.commit()
Handle transactions manually when saving downloaded logs
Handle transactions manually when saving downloaded logs
Python
mit
p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc
python
## Code Before: import api import db from utils import parse_timestamp import hashlib import json def download_logs(): """ Download mailgun logs and store them in the database """ logs = [] skip = 0 # Fetch all unsaved logs and add them to a LIFO queue while True: print("fecthing logs starting at {}".format(skip)) for log in api.get_logs(limit=100, skip=skip)['items']: log_data = json.dumps(log) log_hash = hashlib.sha256(log_data).hexdigest() if db.MailgunLog.objects.filter(log_hash=log_hash).exists(): break else: logs[:0] = [(log_hash, log_data, parse_timestamp(log['date_created']] else: break skip += 100 # take items from LIFO queue and save to db for log_hash, data, timestamp in logs: db.MailgunLog( log_hash=log_hash, data=data, timestamp=timestamp ).save() ## Instruction: Handle transactions manually when saving downloaded logs ## Code After: import api import db from utils import parse_timestamp from django.db import transaction from collections import OrderedDict import hashlib import json @transaction.commit_manually def download_logs(): """ Download mailgun logs and store them in the database """ # use ordered dict to protect against new logs arriving while downloading logs logs = OrderedDict() skip = 0 # Fetch all unsaved logs and add them to a LIFO queue fetch_more = True while fetch_more: print("fecthing logs skip={}".format(skip)) logs_tmp = api.get_logs(limit=1000, skip=skip)['items'] if len(logs_tmp) == 0: break for log in logs_tmp: log_data = json.dumps(log) log_hash = hashlib.sha256(log_data).hexdigest() if db.MailgunLog.objects.filter(log_hash=log_hash).exists(): fetch_more = False break else: logs[log_hash] = (log_hash, log_data, parse_timestamp(log['created_at'])) skip += 1000 # take items from LIFO queue and save to db print("Saving {0} logs to database".format(len(logs))) for i, (log_hash, data, timestamp) in enumerate(logs.values()): db.MailgunLog( log_hash=log_hash, data=data, timestamp=timestamp ).save() if (i+1) % 100 == 0: transaction.commit() transaction.commit()
# ... existing code ... import api import db from utils import parse_timestamp from django.db import transaction from collections import OrderedDict import hashlib import json @transaction.commit_manually def download_logs(): """ Download mailgun logs and store them in the database """ # use ordered dict to protect against new logs arriving while downloading logs logs = OrderedDict() skip = 0 # Fetch all unsaved logs and add them to a LIFO queue fetch_more = True while fetch_more: print("fecthing logs skip={}".format(skip)) logs_tmp = api.get_logs(limit=1000, skip=skip)['items'] if len(logs_tmp) == 0: break for log in logs_tmp: log_data = json.dumps(log) log_hash = hashlib.sha256(log_data).hexdigest() if db.MailgunLog.objects.filter(log_hash=log_hash).exists(): fetch_more = False break else: logs[log_hash] = (log_hash, log_data, parse_timestamp(log['created_at'])) skip += 1000 # take items from LIFO queue and save to db print("Saving {0} logs to database".format(len(logs))) for i, (log_hash, data, timestamp) in enumerate(logs.values()): db.MailgunLog( log_hash=log_hash, data=data, timestamp=timestamp ).save() if (i+1) % 100 == 0: transaction.commit() transaction.commit() # ... rest of the code ...
372d03b25f21d363138ecf340816dd04fb33ef71
docs/conf.py
docs/conf.py
extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'django-soapbox' copyright = u'2011-2015, James Bennett' version = '1.1' release = '1.1' exclude_trees = ['_build'] pygments_style = 'sphinx' html_static_path = ['_static'] htmlhelp_basename = 'django-soapboxdoc' latex_documents = [ ('index', 'django-soapbox.tex', u'django-soapbox Documentation', u'James Bennett', 'manual'), ] html_theme = 'classic'
import os on_rtd = os.environ.get('READTHEDOCS', None) == 'True' extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'django-soapbox' copyright = u'2011-2015, James Bennett' version = '1.1' release = '1.1' exclude_trees = ['_build'] pygments_style = 'sphinx' html_static_path = ['_static'] htmlhelp_basename = 'django-soapboxdoc' latex_documents = [ ('index', 'django-soapbox.tex', u'django-soapbox Documentation', u'James Bennett', 'manual'), ] if not on_rtd: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
Switch to RTD docs theme.
Switch to RTD docs theme.
Python
bsd-3-clause
ubernostrum/django-soapbox,ubernostrum/django-soapbox
python
## Code Before: extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'django-soapbox' copyright = u'2011-2015, James Bennett' version = '1.1' release = '1.1' exclude_trees = ['_build'] pygments_style = 'sphinx' html_static_path = ['_static'] htmlhelp_basename = 'django-soapboxdoc' latex_documents = [ ('index', 'django-soapbox.tex', u'django-soapbox Documentation', u'James Bennett', 'manual'), ] html_theme = 'classic' ## Instruction: Switch to RTD docs theme. ## Code After: import os on_rtd = os.environ.get('READTHEDOCS', None) == 'True' extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'django-soapbox' copyright = u'2011-2015, James Bennett' version = '1.1' release = '1.1' exclude_trees = ['_build'] pygments_style = 'sphinx' html_static_path = ['_static'] htmlhelp_basename = 'django-soapboxdoc' latex_documents = [ ('index', 'django-soapbox.tex', u'django-soapbox Documentation', u'James Bennett', 'manual'), ] if not on_rtd: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
... import os on_rtd = os.environ.get('READTHEDOCS', None) == 'True' extensions = [] templates_path = ['_templates'] source_suffix = '.rst' ... ('index', 'django-soapbox.tex', u'django-soapbox Documentation', u'James Bennett', 'manual'), ] if not on_rtd: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] ...
380baa34af7e8a704780f0ec535b626f4a286e23
deflect/admin.py
deflect/admin.py
from django.contrib import admin from .models import RedirectURL class RedirectURLAdmin(admin.ModelAdmin): list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',) list_filter = ('creator__username', 'campaign', 'medium',) ordering = ('-last_used',) readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',) search_fields = ['url', 'campaign'] fieldsets = ((None, {'fields': ('url', 'short_url',)}), ('Google', {'fields': ('campaign', 'medium', 'content',)}), ('Additional info', {'fields': ('description', 'qr_code',)}), ('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),) def save_model(self, request, obj, form, change): obj.user = request.user obj.save() admin.site.register(RedirectURL, RedirectURLAdmin)
from django.contrib import admin from .models import RedirectURL class RedirectURLAdmin(admin.ModelAdmin): list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',) list_filter = ('creator__username', 'campaign', 'medium',) ordering = ('-last_used',) readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',) search_fields = ['url', 'campaign'] fieldsets = ((None, {'fields': ('url', 'short_url',)}), ('Google', {'fields': ('campaign', 'medium', 'content',)}), ('Additional info', {'fields': ('description', 'qr_code',)}), ('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),) def save_model(self, request, obj, form, change): if not change: obj.creator = request.user obj.save() admin.site.register(RedirectURL, RedirectURLAdmin)
Fix model creator updating on change event
Fix model creator updating on change event
Python
bsd-3-clause
jbittel/django-deflect
python
## Code Before: from django.contrib import admin from .models import RedirectURL class RedirectURLAdmin(admin.ModelAdmin): list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',) list_filter = ('creator__username', 'campaign', 'medium',) ordering = ('-last_used',) readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',) search_fields = ['url', 'campaign'] fieldsets = ((None, {'fields': ('url', 'short_url',)}), ('Google', {'fields': ('campaign', 'medium', 'content',)}), ('Additional info', {'fields': ('description', 'qr_code',)}), ('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),) def save_model(self, request, obj, form, change): obj.user = request.user obj.save() admin.site.register(RedirectURL, RedirectURLAdmin) ## Instruction: Fix model creator updating on change event ## Code After: from django.contrib import admin from .models import RedirectURL class RedirectURLAdmin(admin.ModelAdmin): list_display = ('url', 'short_url', 'hits', 'last_used', 'creator', 'campaign', 'medium',) list_filter = ('creator__username', 'campaign', 'medium',) ordering = ('-last_used',) readonly_fields = ('created', 'short_url', 'qr_code', 'hits', 'last_used',) search_fields = ['url', 'campaign'] fieldsets = ((None, {'fields': ('url', 'short_url',)}), ('Google', {'fields': ('campaign', 'medium', 'content',)}), ('Additional info', {'fields': ('description', 'qr_code',)}), ('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),) def save_model(self, request, obj, form, change): if not change: obj.creator = request.user obj.save() admin.site.register(RedirectURL, RedirectURLAdmin)
# ... existing code ... ('Short URL Usage', {'fields': ('hits', 'created', 'last_used',)}),) def save_model(self, request, obj, form, change): if not change: obj.creator = request.user obj.save() # ... rest of the code ...
1e5a956eb289b8333ecf3c3cc00f51295f37870a
api_tests/institutions/views/test_institution_users_list.py
api_tests/institutions/views/test_institution_users_list.py
from nose.tools import * # flake8: noqa from tests.base import ApiTestCase from osf_tests.factories import InstitutionFactory, UserFactory from api.base.settings.defaults import API_BASE class TestInstitutionUsersList(ApiTestCase): def setUp(self): super(TestInstitutionUsersList, self).setUp() self.institution = InstitutionFactory() self.user1 = UserFactory() self.user1.affiliated_institutions.add(self.institution) self.user1.save() self.user2 = UserFactory() self.user2.affiliated_institutions.add(self.institution) self.user2.save() self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id) def test_return_all_users(self): res = self.app.get(self.institution_user_url) assert_equal(res.status_code, 200) ids = [each['id'] for each in res.json['data']] assert_equal(len(res.json['data']), 2) assert_in(self.user1._id, ids) assert_in(self.user2._id, ids)
import pytest from api.base.settings.defaults import API_BASE from osf_tests.factories import ( InstitutionFactory, UserFactory, ) @pytest.mark.django_db class TestInstitutionUsersList: @pytest.fixture() def institution(self): return InstitutionFactory() @pytest.fixture() def user_one(self, institution): user_one = UserFactory() user_one.affiliated_institutions.add(institution) user_one.save() return user_one @pytest.fixture() def user_two(self, institution): user_two = UserFactory() user_two.affiliated_institutions.add(institution) user_two.save() return user_two @pytest.fixture() def url_institution_user(self, institution): return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id) def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user): res = app.get(url_institution_user) assert res.status_code == 200 ids = [each['id'] for each in res.json['data']] assert len(res.json['data']) == 2 assert user_one._id in ids assert user_two._id in ids
Convert institutions users to pytest
Convert institutions users to pytest
Python
apache-2.0
cslzchen/osf.io,chennan47/osf.io,crcresearch/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,icereval/osf.io,crcresearch/osf.io,cslzchen/osf.io,sloria/osf.io,felliott/osf.io,binoculars/osf.io,laurenrevere/osf.io,mfraezz/osf.io,felliott/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,adlius/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,erinspace/osf.io,baylee-d/osf.io,erinspace/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,sloria/osf.io,mfraezz/osf.io,chrisseto/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,leb2dg/osf.io,mattclark/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,leb2dg/osf.io,felliott/osf.io,Johnetordoff/osf.io,chennan47/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,adlius/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,laurenrevere/osf.io,icereval/osf.io,aaxelb/osf.io,adlius/osf.io,aaxelb/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,chrisseto/osf.io,caneruguz/osf.io,pattisdr/osf.io,adlius/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,baylee-d/osf.io,aaxelb/osf.io,mfraezz/osf.io,saradbowman/osf.io,sloria/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,chennan47/osf.io,TomBaxter/osf.io,binoculars/osf.io,mfraezz/osf.io,felliott/osf.io,mattclark/osf.io
python
## Code Before: from nose.tools import * # flake8: noqa from tests.base import ApiTestCase from osf_tests.factories import InstitutionFactory, UserFactory from api.base.settings.defaults import API_BASE class TestInstitutionUsersList(ApiTestCase): def setUp(self): super(TestInstitutionUsersList, self).setUp() self.institution = InstitutionFactory() self.user1 = UserFactory() self.user1.affiliated_institutions.add(self.institution) self.user1.save() self.user2 = UserFactory() self.user2.affiliated_institutions.add(self.institution) self.user2.save() self.institution_user_url = '/{0}institutions/{1}/users/'.format(API_BASE, self.institution._id) def test_return_all_users(self): res = self.app.get(self.institution_user_url) assert_equal(res.status_code, 200) ids = [each['id'] for each in res.json['data']] assert_equal(len(res.json['data']), 2) assert_in(self.user1._id, ids) assert_in(self.user2._id, ids) ## Instruction: Convert institutions users to pytest ## Code After: import pytest from api.base.settings.defaults import API_BASE from osf_tests.factories import ( InstitutionFactory, UserFactory, ) @pytest.mark.django_db class TestInstitutionUsersList: @pytest.fixture() def institution(self): return InstitutionFactory() @pytest.fixture() def user_one(self, institution): user_one = UserFactory() user_one.affiliated_institutions.add(institution) user_one.save() return user_one @pytest.fixture() def user_two(self, institution): user_two = UserFactory() user_two.affiliated_institutions.add(institution) user_two.save() return user_two @pytest.fixture() def url_institution_user(self, institution): return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id) def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user): res = app.get(url_institution_user) assert res.status_code == 200 ids = [each['id'] for each in res.json['data']] assert len(res.json['data']) == 2 assert user_one._id in ids assert user_two._id in ids
# ... existing code ... import pytest from api.base.settings.defaults import API_BASE from osf_tests.factories import ( InstitutionFactory, UserFactory, ) @pytest.mark.django_db class TestInstitutionUsersList: @pytest.fixture() def institution(self): return InstitutionFactory() @pytest.fixture() def user_one(self, institution): user_one = UserFactory() user_one.affiliated_institutions.add(institution) user_one.save() return user_one @pytest.fixture() def user_two(self, institution): user_two = UserFactory() user_two.affiliated_institutions.add(institution) user_two.save() return user_two @pytest.fixture() def url_institution_user(self, institution): return '/{0}institutions/{1}/users/'.format(API_BASE, institution._id) def test_return_all_users(self, app, institution, user_one, user_two, url_institution_user): res = app.get(url_institution_user) assert res.status_code == 200 ids = [each['id'] for each in res.json['data']] assert len(res.json['data']) == 2 assert user_one._id in ids assert user_two._id in ids # ... rest of the code ...
69f51ba0acc7e5c0bc850305bb98da8a397f57c6
plugins/stats-collector/completion-ranker/src/com/jetbrains/completion/ranker/FallbackKotlinMLRankingProvider.kt
plugins/stats-collector/completion-ranker/src/com/jetbrains/completion/ranker/FallbackKotlinMLRankingProvider.kt
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.completion.ranker import com.intellij.ide.plugins.PluginManager import com.intellij.internal.ml.DecisionFunction import com.intellij.internal.ml.ModelMetadata import com.intellij.internal.ml.completion.CompletionRankingModelBase import com.intellij.internal.ml.completion.JarCompletionModelProvider import com.intellij.lang.Language import com.intellij.openapi.extensions.PluginId import com.jetbrains.completion.ranker.model.kotlin.MLGlassBox class FallbackKotlinMLRankingProvider : JarCompletionModelProvider("Kotlin", "kotlin_features"), WeakModelProvider { override fun createModel(metadata: ModelMetadata): DecisionFunction { return object : CompletionRankingModelBase(metadata) { override fun predict(features: DoubleArray?): Double = MLGlassBox.makePredict(features) } } override fun isLanguageSupported(language: Language): Boolean = language.id.compareTo("kotlin", ignoreCase = true) == 0 override fun canBeUsed(): Boolean { return PluginManager.getInstance().findEnabledPlugin(PluginId.findId(KOTLIN_PLUGIN_ID) ?: return false)?.isEnabled ?: false } override fun shouldReplace(): Boolean = false private companion object { private const val KOTLIN_PLUGIN_ID = "org.jetbrains.kotlin" } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.completion.ranker import com.intellij.ide.plugins.PluginManager import com.intellij.internal.ml.DecisionFunction import com.intellij.internal.ml.ModelMetadata import com.intellij.internal.ml.completion.CompletionRankingModelBase import com.intellij.internal.ml.completion.JarCompletionModelProvider import com.intellij.lang.Language import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.extensions.PluginId import com.jetbrains.completion.ranker.model.kotlin.MLGlassBox class FallbackKotlinMLRankingProvider : JarCompletionModelProvider("Kotlin", "kotlin_features"), WeakModelProvider { override fun createModel(metadata: ModelMetadata): DecisionFunction { return object : CompletionRankingModelBase(metadata) { override fun predict(features: DoubleArray?): Double = MLGlassBox.makePredict(features) } } override fun isLanguageSupported(language: Language): Boolean = language.id.compareTo("kotlin", ignoreCase = true) == 0 override fun canBeUsed(): Boolean { return PluginManager.getInstance().findEnabledPlugin(PluginId.findId(KOTLIN_PLUGIN_ID) ?: return false)?.isEnabled ?: false } override fun shouldReplace(): Boolean = ApplicationManager.getApplication().isEAP private companion object { private const val KOTLIN_PLUGIN_ID = "org.jetbrains.kotlin" } }
Use bundled kotlin model during experiment
Use bundled kotlin model during experiment GitOrigin-RevId: 192383778dc3a86d2be2a452e00c1f42c25a3103
Kotlin
apache-2.0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
kotlin
## Code Before: // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.completion.ranker import com.intellij.ide.plugins.PluginManager import com.intellij.internal.ml.DecisionFunction import com.intellij.internal.ml.ModelMetadata import com.intellij.internal.ml.completion.CompletionRankingModelBase import com.intellij.internal.ml.completion.JarCompletionModelProvider import com.intellij.lang.Language import com.intellij.openapi.extensions.PluginId import com.jetbrains.completion.ranker.model.kotlin.MLGlassBox class FallbackKotlinMLRankingProvider : JarCompletionModelProvider("Kotlin", "kotlin_features"), WeakModelProvider { override fun createModel(metadata: ModelMetadata): DecisionFunction { return object : CompletionRankingModelBase(metadata) { override fun predict(features: DoubleArray?): Double = MLGlassBox.makePredict(features) } } override fun isLanguageSupported(language: Language): Boolean = language.id.compareTo("kotlin", ignoreCase = true) == 0 override fun canBeUsed(): Boolean { return PluginManager.getInstance().findEnabledPlugin(PluginId.findId(KOTLIN_PLUGIN_ID) ?: return false)?.isEnabled ?: false } override fun shouldReplace(): Boolean = false private companion object { private const val KOTLIN_PLUGIN_ID = "org.jetbrains.kotlin" } } ## Instruction: Use bundled kotlin model during experiment GitOrigin-RevId: 192383778dc3a86d2be2a452e00c1f42c25a3103 ## Code After: // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.completion.ranker import com.intellij.ide.plugins.PluginManager import com.intellij.internal.ml.DecisionFunction import com.intellij.internal.ml.ModelMetadata import com.intellij.internal.ml.completion.CompletionRankingModelBase import com.intellij.internal.ml.completion.JarCompletionModelProvider import com.intellij.lang.Language import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.extensions.PluginId import com.jetbrains.completion.ranker.model.kotlin.MLGlassBox class FallbackKotlinMLRankingProvider : JarCompletionModelProvider("Kotlin", "kotlin_features"), WeakModelProvider { override fun createModel(metadata: ModelMetadata): DecisionFunction { return object : CompletionRankingModelBase(metadata) { override fun predict(features: DoubleArray?): Double = MLGlassBox.makePredict(features) } } override fun isLanguageSupported(language: Language): Boolean = language.id.compareTo("kotlin", ignoreCase = true) == 0 override fun canBeUsed(): Boolean { return PluginManager.getInstance().findEnabledPlugin(PluginId.findId(KOTLIN_PLUGIN_ID) ?: return false)?.isEnabled ?: false } override fun shouldReplace(): Boolean = ApplicationManager.getApplication().isEAP private companion object { private const val KOTLIN_PLUGIN_ID = "org.jetbrains.kotlin" } }
# ... existing code ... import com.intellij.internal.ml.completion.CompletionRankingModelBase import com.intellij.internal.ml.completion.JarCompletionModelProvider import com.intellij.lang.Language import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.extensions.PluginId import com.jetbrains.completion.ranker.model.kotlin.MLGlassBox # ... modified code ... return PluginManager.getInstance().findEnabledPlugin(PluginId.findId(KOTLIN_PLUGIN_ID) ?: return false)?.isEnabled ?: false } override fun shouldReplace(): Boolean = ApplicationManager.getApplication().isEAP private companion object { private const val KOTLIN_PLUGIN_ID = "org.jetbrains.kotlin" # ... rest of the code ...
1c2a0232ac34d8271284297adb16a0ee81a98438
src/globals/WMath.java
src/globals/WMath.java
package globals; import util.KawaWrap; public class WMath extends Globals { /** * Add methods related to (random n) * * @param kawa The interpreter to add them to. * @throws Throwable If anything breaks while adding them. */ @Override public void addMethods(KawaWrap kawa) throws Throwable { kawa.eval("(define (sub1 n) (- n 1))"); kawa.eval("(define (add1 n) (+ n 1))"); } }
package globals; import gnu.mapping.Procedure1; import gnu.math.IntNum; import util.KawaWrap; public class WMath extends Globals { /** * Add methods related to (random n) * * @param kawa The interpreter to add them to. * @throws Throwable If anything breaks while adding them. */ @Override public void addMethods(KawaWrap kawa) throws Throwable { kawa.eval("(define (sub1 n) (- n 1))"); kawa.eval("(define (add1 n) (+ n 1))"); kawa.bind(new Procedure1("even?") { @Override public Object apply1(Object arg1) throws Throwable { if (!(arg1 instanceof IntNum)) throw new IllegalArgumentException("Error in even?, expected integer as first argument, got '" + KawaWrap.formatObject(arg1) + "'."); return !((IntNum) arg1).isOdd(); } }); kawa.bind(new Procedure1("odd?") { @Override public Object apply1(Object arg1) throws Throwable { if (!(arg1 instanceof IntNum)) throw new IllegalArgumentException("Error in odd?, expected integer as first argument, got '" + KawaWrap.formatObject(arg1) + "'."); return ((IntNum) arg1).isOdd(); } }); } }
Fix for even? and odd?
Fix for even? and odd? git-svn-id: 4d643f7da126170ce1f05d01cf12027d96868c5b@200 07aa23ea-853d-4461-9545-5093946fcb7a
Java
bsd-3-clause
jpverkamp/wombat-ide,jpverkamp/wombat-ide
java
## Code Before: package globals; import util.KawaWrap; public class WMath extends Globals { /** * Add methods related to (random n) * * @param kawa The interpreter to add them to. * @throws Throwable If anything breaks while adding them. */ @Override public void addMethods(KawaWrap kawa) throws Throwable { kawa.eval("(define (sub1 n) (- n 1))"); kawa.eval("(define (add1 n) (+ n 1))"); } } ## Instruction: Fix for even? and odd? git-svn-id: 4d643f7da126170ce1f05d01cf12027d96868c5b@200 07aa23ea-853d-4461-9545-5093946fcb7a ## Code After: package globals; import gnu.mapping.Procedure1; import gnu.math.IntNum; import util.KawaWrap; public class WMath extends Globals { /** * Add methods related to (random n) * * @param kawa The interpreter to add them to. * @throws Throwable If anything breaks while adding them. */ @Override public void addMethods(KawaWrap kawa) throws Throwable { kawa.eval("(define (sub1 n) (- n 1))"); kawa.eval("(define (add1 n) (+ n 1))"); kawa.bind(new Procedure1("even?") { @Override public Object apply1(Object arg1) throws Throwable { if (!(arg1 instanceof IntNum)) throw new IllegalArgumentException("Error in even?, expected integer as first argument, got '" + KawaWrap.formatObject(arg1) + "'."); return !((IntNum) arg1).isOdd(); } }); kawa.bind(new Procedure1("odd?") { @Override public Object apply1(Object arg1) throws Throwable { if (!(arg1 instanceof IntNum)) throw new IllegalArgumentException("Error in odd?, expected integer as first argument, got '" + KawaWrap.formatObject(arg1) + "'."); return ((IntNum) arg1).isOdd(); } }); } }
// ... existing code ... package globals; import gnu.mapping.Procedure1; import gnu.math.IntNum; import util.KawaWrap; public class WMath extends Globals { // ... modified code ... public void addMethods(KawaWrap kawa) throws Throwable { kawa.eval("(define (sub1 n) (- n 1))"); kawa.eval("(define (add1 n) (+ n 1))"); kawa.bind(new Procedure1("even?") { @Override public Object apply1(Object arg1) throws Throwable { if (!(arg1 instanceof IntNum)) throw new IllegalArgumentException("Error in even?, expected integer as first argument, got '" + KawaWrap.formatObject(arg1) + "'."); return !((IntNum) arg1).isOdd(); } }); kawa.bind(new Procedure1("odd?") { @Override public Object apply1(Object arg1) throws Throwable { if (!(arg1 instanceof IntNum)) throw new IllegalArgumentException("Error in odd?, expected integer as first argument, got '" + KawaWrap.formatObject(arg1) + "'."); return ((IntNum) arg1).isOdd(); } }); } } // ... rest of the code ...