diff | message | diff_languages |
---|---|---|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,8 +20,6 @@ setup(
license="MIT",
include_package_data=True,
packages=['config_resolver'],
- install_requires=DEPENDENCIES,
- requires=DEPENDENCIES,
package_data={
'config_resolver': ['py.typed'],
},
|
Remove obsolete dependencies from setup.py
|
py
|
diff --git a/dusty/commands/run.py b/dusty/commands/run.py
index <HASH>..<HASH> 100644
--- a/dusty/commands/run.py
+++ b/dusty/commands/run.py
@@ -54,7 +54,10 @@ def restart_apps_or_services(*app_or_service_names):
yield "Restarting the following apps or services: {}".format(', '.join(app_or_service_names))
else:
yield "Restarting all active containers associated with Dusty"
- specs = spec_assembler.get_specs()
- app_names = [app_name for app_name in app_or_service_names if app_name in specs['apps']]
- rsync.sync_repos_by_app_name(app_names)
+ if len(app_or_service_names) > 0:
+ specs = spec_assembler.get_specs()
+ app_names = [app_name for app_name in app_or_service_names if app_name in specs['apps']]
+ rsync.sync_repos_by_app_name(app_names)
+ else:
+ rsync.sync_repos(spec_assembler.get_all_repos(active_only=True, include_specs_repo=False))
compose.restart_running_services(app_or_service_names)
|
if no repos are passed then resync all repos
|
py
|
diff --git a/Malmo/samples/Python_examples/depth_map_runner.py b/Malmo/samples/Python_examples/depth_map_runner.py
index <HASH>..<HASH> 100755
--- a/Malmo/samples/Python_examples/depth_map_runner.py
+++ b/Malmo/samples/Python_examples/depth_map_runner.py
@@ -194,7 +194,7 @@ if agent_host.receivedArgument("test"):
else:
num_reps = 30000
-my_mission_record_spec = MalmoPython.MissionRecordSpec()
+my_mission_record = MalmoPython.MissionRecordSpec()
if recordingsDirectory:
my_mission_record.recordRewards()
my_mission_record.recordObservations()
@@ -208,7 +208,7 @@ for iRepeat in range(num_reps):
max_retries = 3
for retry in range(max_retries):
try:
- agent_host.startMission( my_mission, my_mission_record_spec )
+ agent_host.startMission( my_mission, my_mission_record )
break
except RuntimeError as e:
if retry == max_retries - 1:
|
Cut/paste error in depthmap sample
|
py
|
diff --git a/tests/vulnerabilities_across_files_test.py b/tests/vulnerabilities_across_files_test.py
index <HASH>..<HASH> 100644
--- a/tests/vulnerabilities_across_files_test.py
+++ b/tests/vulnerabilities_across_files_test.py
@@ -15,7 +15,7 @@ from pyt.project_handler import get_directory_modules, get_modules
from pyt.reaching_definitions_taint import ReachingDefinitionsTaintAnalysis
from pyt.vulnerabilities import find_vulnerabilities
-
+nosec_lines = set()
class EngineTest(BaseTestCase):
def run_analysis(self, path):
path = os.path.normpath(path)
@@ -40,7 +40,8 @@ class EngineTest(BaseTestCase):
VulnerabilityFiles(
default_blackbox_mapping_file,
default_trigger_word_file
- )
+ ),
+ nosec_lines
)
def test_find_vulnerabilities_absolute_from_file_command_injection(self):
|
added empty nosec_lines for tests
|
py
|
diff --git a/salt/modules/kvm_hyper.py b/salt/modules/kvm_hyper.py
index <HASH>..<HASH> 100644
--- a/salt/modules/kvm_hyper.py
+++ b/salt/modules/kvm_hyper.py
@@ -42,6 +42,8 @@ def __virtual__():
'''
Apply this module as the hyper module if the minion is a kvm hypervisor
'''
+ if 'virtual' not in __grains__:
+ return False
if __grains__['virtual'] != 'physical':
return False
if 'kvm_' not in open('/proc/modules').read():
|
Check for existence of 'virtual' in __grains__ first. This was breaking the minion on Windows.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,6 +20,7 @@ else:
platforms='any',
install_requires=[
'Flask',
+ 'typing',
'paho-mqtt'
],
classifiers=[
|
Update setup.py: add typing package for type annotation support on Python < <I>
|
py
|
diff --git a/wafer/settings.py b/wafer/settings.py
index <HASH>..<HASH> 100644
--- a/wafer/settings.py
+++ b/wafer/settings.py
@@ -147,9 +147,9 @@ INSTALLED_APPS = (
'wafer.sponsors',
'wafer.pages',
'wafer.tickets',
+ 'wafer.compare',
# Django isn't finding the overridden templates
'registration',
- 'wafer.compare',
)
from django.db import migrations
|
Keep all the wafer apps together in the settings file
|
py
|
diff --git a/sen/tui/init.py b/sen/tui/init.py
index <HASH>..<HASH> 100644
--- a/sen/tui/init.py
+++ b/sen/tui/init.py
@@ -192,8 +192,10 @@ class UI(urwid.MainLoop):
add_subwidget(str(containers_count), "status_text_red")
add_subwidget(", Running: ")
- add_subwidget(str(len(self.d.sorted_containers(sort_by_time=False, stopped=False))),
- "status_text_green")
+ running_containers = self.d.sorted_containers(sort_by_time=False, stopped=False)
+ running_containers_n = len(running_containers)
+ add_subwidget(str(running_containers_n),
+ "status_text_green" if running_containers_n > 0 else "status_text")
try:
command_name, command_took = self.d.last_command.popleft()
|
status: print 0 running containers in gray
|
py
|
diff --git a/tests/test_composition.py b/tests/test_composition.py
index <HASH>..<HASH> 100644
--- a/tests/test_composition.py
+++ b/tests/test_composition.py
@@ -396,6 +396,7 @@ def test_groups_can_have_completion_callbacks(stub_broker, stub_worker, rate_lim
@dramatiq.actor
def finalize(n):
+ assert n == 42
finalize_times.append(time.monotonic())
finalized.set()
@@ -452,6 +453,7 @@ def test_groups_of_pipelines_can_have_completion_callbacks(stub_broker, stub_wor
@dramatiq.actor
def finalize(n):
+ assert n == 42
finalize_times.append(time.monotonic())
finalized.set()
|
test(composition): assert on value of param to cb
|
py
|
diff --git a/tests/tests.py b/tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -807,6 +807,10 @@ class TestZappa(unittest.TestCase):
zappa_cli.print_logs(logs, colorize=True, non_http=False)
zappa_cli.print_logs(logs, colorize=True, non_http=True, http=True)
zappa_cli.print_logs(logs, colorize=True, non_http=False, http=False)
+ zappa_cli.print_logs(logs, colorize=False, force_colorize=False)
+ zappa_cli.print_logs(logs, colorize=False, force_colorize=True)
+ zappa_cli.print_logs(logs, colorize=True, force_colorize=False)
+ zappa_cli.print_logs(logs, colorize=True, non_http=False, http=False, force_colorize=True)
zappa_cli.check_for_update()
def test_cli_format_invoke_command(self):
|
Expand test to include new force_colorize parameter.
|
py
|
diff --git a/src/livestreamer_cli/main.py b/src/livestreamer_cli/main.py
index <HASH>..<HASH> 100644
--- a/src/livestreamer_cli/main.py
+++ b/src/livestreamer_cli/main.py
@@ -174,6 +174,7 @@ def output_stream_http(plugin, streams):
server.close(True)
player.close()
+ server.close()
def output_stream_passthrough(stream):
|
cli: Explicitly close the server listening socket. Avoids a ResourceWarning for “--player-continuous-http” mode.
|
py
|
diff --git a/astrocats/supernovae/tasks/cfa.py b/astrocats/supernovae/tasks/cfa.py
index <HASH>..<HASH> 100644
--- a/astrocats/supernovae/tasks/cfa.py
+++ b/astrocats/supernovae/tasks/cfa.py
@@ -260,7 +260,7 @@ def do_cfa_spectra(catalog):
fluxes=fluxes, u_time='MJD' if time else '', time=time,
instrument=instrument, source=sources,
dereddened=False, deredshifted=False)
- if catalog.args.travis and fi >= TRAVIS_QUERY_LIMIT:
+ if catalog.args.travis and fi >= catalog.TRAVIS_QUERY_LIMIT:
break
catalog.journal_entries()
@@ -318,7 +318,7 @@ def do_cfa_spectra(catalog):
fluxes=fluxes, u_time='MJD' if time else '', time=time,
instrument=instrument, source=source,
dereddened=False, deredshifted=False)
- if catalog.args.travis and fi >= TRAVIS_QUERY_LIMIT:
+ if catalog.args.travis and fi >= catalog.TRAVIS_QUERY_LIMIT:
break
catalog.journal_entries()
|
BUG: changed travis ref
|
py
|
diff --git a/scripts/issues.py b/scripts/issues.py
index <HASH>..<HASH> 100755
--- a/scripts/issues.py
+++ b/scripts/issues.py
@@ -299,15 +299,6 @@ def generate_changelog(issues, after, heading, rtag=False):
content = f.read()
f.seek(0)
write(f.write, '\n', content)
- # Insert the summary points from the <tag>.rst file into the CHANGELOG
- flines = []
- with open("../CHANGELOG", "r") as f:
- flines = f.readlines()
- with open("../sphinx/source/docs/releases/" + rtag + ".rst", "r") as f:
- starters = ("*", " -", " ")
- flines[2:2] = [" " + line for line in f.readlines() if line.startswith(starters)]
- with open("../CHANGELOG", "w") as f:
- f.writelines(flines)
else:
write(print)
|
don't copy the release notes into the changelog
|
py
|
diff --git a/test/testing/test_waiting.py b/test/testing/test_waiting.py
index <HASH>..<HASH> 100644
--- a/test/testing/test_waiting.py
+++ b/test/testing/test_waiting.py
@@ -168,6 +168,20 @@ class TestPatchWait(object):
with wait_for_call(echo, 'upper'):
assert echo.proxy(arg) == "HELLO"
+ def test_patch_class(self):
+
+ class Echo(object):
+
+ def upper(self, arg):
+ return arg.upper()
+
+ echo = Echo()
+ arg = "hello"
+
+ with wait_for_call(Echo, 'upper'):
+ res = echo.upper(arg)
+ assert res == "HELLO"
+
def test_result(self):
class Echo(object):
|
add missing testcase for patching methods on the class, not instance
|
py
|
diff --git a/pipes/securitygroup/create_securitygroup.py b/pipes/securitygroup/create_securitygroup.py
index <HASH>..<HASH> 100644
--- a/pipes/securitygroup/create_securitygroup.py
+++ b/pipes/securitygroup/create_securitygroup.py
@@ -25,10 +25,10 @@ class SpinnakerSecurityGroup:
app_name: Str of application name add Security Group to.
"""
- def __init__(self, app_name=''):
+ def __init__(self, app_info):
self.log = logging.getLogger(__name__)
- self.app_name = self.app_exists(app_name=app_name)
+ self.app_name = self.app_exists(app_name=app_info['name'])
self.here = os.path.dirname(os.path.realpath(__file__))
@@ -154,7 +154,7 @@ def main():
'environment': args.environment,
'subnet': args.subnet, }
- spinnakerapps = SpinnakerSecurityGroup(app_name=args.name)
+ spinnakerapps = SpinnakerSecurityGroup(app_info=appinfo)
sg_json = spinnakerapps.get_template(
template_name='securitygroup_template.json',
template_dict=appinfo)
|
Pass a dict of appinfo details instead of multiple parameters. Eventually we will need all the application details, so it's simpler to pass a dict with all the values rather than specifying each specific parameter needed in the class. Related: PSOBAT-<I>
|
py
|
diff --git a/lewis/core/simulation.py b/lewis/core/simulation.py
index <HASH>..<HASH> 100644
--- a/lewis/core/simulation.py
+++ b/lewis/core/simulation.py
@@ -223,8 +223,8 @@ class Simulation(object):
with self._adapters.device_lock:
self._device.process(delta_simulation)
- if self._control_server:
- self._control_server.process()
+ if self._control_server:
+ self._control_server.process()
self._cycles += 1
self._runtime += delta_simulation
@@ -307,8 +307,9 @@ class Simulation(object):
'The following parameters do not exist in the device or are methods: {}.'
'Parameters not updated.'.format(invalid_parameters))
- for name, value in parameters.items():
- setattr(self._device, name, value)
+ with self._adapters.device_lock:
+ for name, value in parameters.items():
+ setattr(self._device, name, value)
self.log.debug('Updated device parameters: %s', parameters)
|
Fix problems with ControlServer deadlock
|
py
|
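The deadlock fix above makes external parameter updates take the same lock that guards the simulation cycle. A minimal sketch of that pattern, with illustrative names rather than the actual lewis API:

```python
import threading

# Both the cycle loop and external setters must acquire the same lock
# that guards the device, so updates never interleave with a cycle.
device_lock = threading.Lock()

def process_cycle(device, delta):
    with device_lock:
        device.process(delta)  # simulation step owns the device

def set_parameters(device, parameters):
    with device_lock:  # same lock as the cycle loop
        for name, value in parameters.items():
            setattr(device, name, value)
```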
diff --git a/tests/proton_tests/engine.py b/tests/proton_tests/engine.py
index <HASH>..<HASH> 100644
--- a/tests/proton_tests/engine.py
+++ b/tests/proton_tests/engine.py
@@ -528,7 +528,7 @@ class CreditTest(Test):
self.pump()
idx += 1
- assert idx == 1034, idx
+ assert idx == PN_SESSION_WINDOW + 10, idx
assert pn_queued(self.rcv) == PN_SESSION_WINDOW, pn_queued(self.rcv)
@@ -637,6 +637,7 @@ class CreditTest(Test):
self.pump()
idx += 1
+ assert idx == PN_SESSION_WINDOW + 10, idx
assert pn_queued(self.rcv) == PN_SESSION_WINDOW, pn_queued(self.rcv)
pn_flow(self.rcv, 1)
|
fixed hardcoded assertion; added additional assertion git-svn-id: <URL>
|
py
|
diff --git a/src/nlpia/constants.py b/src/nlpia/constants.py
index <HASH>..<HASH> 100644
--- a/src/nlpia/constants.py
+++ b/src/nlpia/constants.py
@@ -18,7 +18,7 @@ import platform
LOG_LEVEL = 'WARN' if not os.environ.get('DEBUG') else 'DEBUG'
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
system_name = platform.system()
if system_name == 'Darwin':
@@ -92,7 +92,7 @@ logging.config.dictConfig(LOGGING_CONFIG)
logger = logging.getLogger(__name__)
USER_HOME = os.path.expanduser("~")
-PROJECT_PATH = os.path.dirname(os.path.dirname(__file__))
+PROJECT_PATH = PRJECT_DIR = BASE_DIR
DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
BOOK_PATH = os.path.join(DATA_PATH, 'book')
|
fix constants.BASE_DIR = PROJECT_PATH = PROJECT_DIR
|
py
|
diff --git a/django_select2/widgets.py b/django_select2/widgets.py
index <HASH>..<HASH> 100644
--- a/django_select2/widgets.py
+++ b/django_select2/widgets.py
@@ -184,8 +184,9 @@ class Select2Mixin(object):
"""
options = json.dumps(self.get_options())
options = options.replace('"*START*', '').replace('*END*"', '')
- # selector variable must already be passed to this
- return '$(hashedSelector).select2(%s);' % (options)
+ js = 'var hashedSelector = "#" + "%s";' % id_
+ js += '$(hashedSelector).select2(%s);' % (options)
+ return js
def render(self, name, value, attrs=None, choices=()):
"""
|
define hashedSelector in Select2Mixin
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,6 +34,7 @@ setup(
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: System :: Systems Administration',
],
|
Mark bootstrapper as ready for Python <I>.
|
py
|
diff --git a/otherfile/font_wizard.py b/otherfile/font_wizard.py
index <HASH>..<HASH> 100644
--- a/otherfile/font_wizard.py
+++ b/otherfile/font_wizard.py
@@ -9,14 +9,24 @@ Letters = string.ascii_letters + string.punctuation + string.digits
Font_List = list(art.art_param.FONT_MAP.keys())
INVALID_FONT_NAME = [
+ "mix",
"wizard",
"wiz",
+ "magic",
+ "random",
+ "random-na",
"random-xlarge",
"random-large",
"random-medium",
- "random-small"
- "random",
+ "random-small",
+ "rand",
+ "rand-na",
+ "rand-xlarge",
+ "rand-large",
+ "rand-medium",
+ "rand-small",
"rnd",
+ "rnd-na",
"rnd-xlarge",
"rnd-large",
"rnd-medium",
|
edit: INVALID_FONT_NAME list completed.
|
py
|
diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_pkg.py
+++ b/salt/modules/win_pkg.py
@@ -1882,7 +1882,7 @@ def get_repo_data(saltenv='base'):
serial = salt.payload.Serial(__opts__)
with salt.utils.files.fopen(repo_details.winrepo_file, 'rb') as repofile:
try:
- repodata = salt.utils.data.decode(serial.loads(repofile.read(), encoding='utf-8') or {})
+ repodata = salt.utils.data.decode(serial.loads(repofile.read()) or {})
__context__['winrepo.data'] = repodata
return repodata
except Exception as exc:
|
Remove hacky workaround to get Windows to decode deserialized data properly
|
py
|
diff --git a/tests/linalg_test.py b/tests/linalg_test.py
index <HASH>..<HASH> 100644
--- a/tests/linalg_test.py
+++ b/tests/linalg_test.py
@@ -185,7 +185,6 @@ class NumpyLinalgTest(jtu.JaxTestCase):
(2, 2, 2), (2, 3, 3), (3, 2, 2)]
for dtype in float_types + complex_types
for rng_factory in [jtu.rand_default]))
- @jtu.skip_on_devices("tpu")
def testSlogdet(self, shape, dtype, rng_factory):
rng = rng_factory(self.rng())
jtu.skip_if_unsupported_type(dtype)
@@ -1178,7 +1177,6 @@ class ScipyLinalgTest(jtu.JaxTestCase):
(True, (2, 8, 8), (2, 8, 10)),
]
for rng_factory in [jtu.rand_default]))
- @jtu.skip_on_devices("tpu") # TODO(phawkins): Test fails on TPU.
def testTriangularSolveGrad(
self, lower, transpose_a, conjugate_a, unit_diagonal, left_side, a_shape,
b_shape, dtype, rng_factory):
|
Enable TPU linalg tests that now pass.
|
py
|
diff --git a/tests/test_web.py b/tests/test_web.py
index <HASH>..<HASH> 100644
--- a/tests/test_web.py
+++ b/tests/test_web.py
@@ -119,12 +119,11 @@ class TestUrlPrefix(unittest2.TestCase):
class TestHttps(unittest2.TestCase):
def _run(self, https=False):
- options = {}
+ options = {'PSDASH_PORT': 5051}
if https:
options = {
'PSDASH_HTTPS_KEYFILE': os.path.join(os.path.dirname(__file__), 'keyfile'),
- 'PSDASH_HTTPS_CERTFILE': os.path.join(os.path.dirname(__file__), 'cacert.pem'),
- 'PSDASH_PORT': 5051
+ 'PSDASH_HTTPS_CERTFILE': os.path.join(os.path.dirname(__file__), 'cacert.pem')
}
self.r = PsDashRunner(options)
self.runner = gevent.spawn(self.r.run)
|
Trying to fix failing tests in Travis CI.
|
py
|
diff --git a/best/plot.py b/best/plot.py
index <HASH>..<HASH> 100644
--- a/best/plot.py
+++ b/best/plot.py
@@ -19,7 +19,7 @@ from pymc.distributions import noncentral_t_like
pretty_blue = '#89d1ea'
def plot_posterior( sample_vec, bins=None, ax=None, title=None, stat='mode',
- label='', draw_zero=False ):
+ label='', draw_zero=False ):
hdi_min, hdi_max = hdi_of_mcmc( sample_vec )
@@ -48,8 +48,12 @@ def plot_posterior( sample_vec, bins=None, ax=None, title=None, stat='mode',
raise ValueError('unknown stat %s'%stat)
if ax is not None:
- ax.hist( sample_vec, bins=bins, rwidth=0.8,
- facecolor=pretty_blue, edgecolor='none' )
+ if bins is not None:
+ kwargs = {'bins':bins}
+ else:
+ kwargs = {}
+ ax.hist( sample_vec, rwidth=0.8,
+ facecolor=pretty_blue, edgecolor='none', **kwargs )
if title is not None:
ax.set_title( title )
|
bugfix: set bins only if not None
|
py
|
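The fix forwards `bins` to `ax.hist` only when the caller supplied it, so matplotlib falls back to its own default otherwise. A minimal sketch of the conditional-kwargs pattern, assuming matplotlib and synthetic data:

```python
import numpy as np
import matplotlib.pyplot as plt

sample_vec = np.random.randn(1000)
bins = None  # caller may or may not specify bins

# Build kwargs conditionally so matplotlib's default binning is used
# when bins is None, instead of passing bins=None explicitly.
kwargs = {'bins': bins} if bins is not None else {}

fig, ax = plt.subplots()
ax.hist(sample_vec, rwidth=0.8, **kwargs)
```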
diff --git a/lark/tree.py b/lark/tree.py
index <HASH>..<HASH> 100644
--- a/lark/tree.py
+++ b/lark/tree.py
@@ -160,6 +160,15 @@ class SlottedTree(Tree):
def pydot__tree_to_png(tree, filename, rankdir="LR", **kwargs):
+ graph = pydot__tree_to_graph(tree, rankdir, **kwargs)
+ graph.write_png(filename)
+
+
+def pydot__tree_to_dot(tree, filename, rankdir="LR", **kwargs):
+ graph = pydot__tree_to_graph(tree, rankdir, **kwargs)
+ graph.write(filename)
+
+def pydot__tree_to_graph(tree, rankdir="LR", **kwargs):
"""Creates a colorful image that represents the tree (data+children, without meta)
Possible values for `rankdir` are "TB", "LR", "BT", "RL", corresponding to
@@ -197,5 +206,5 @@ def pydot__tree_to_png(tree, filename, rankdir="LR", **kwargs):
return node
_to_pydot(tree)
- graph.write_png(filename)
-
+ return graph
+
|
Added pydot__tree_to_dot and pydot__tree_to_graph, changed pydot__tree_to_png
|
py
|
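After the refactor, the graph is built once and can be written as PNG, DOT, or used directly. A hedged usage sketch, assuming pydot is installed and using a trivial grammar invented for illustration:

```python
from lark import Lark
from lark.tree import (pydot__tree_to_png, pydot__tree_to_dot,
                       pydot__tree_to_graph)

# Hypothetical grammar purely for illustration.
parser = Lark('start: "a" "b"', parser='lalr')
tree = parser.parse("ab")

pydot__tree_to_png(tree, "tree.png")   # render directly to PNG
pydot__tree_to_dot(tree, "tree.dot")   # write DOT source instead
graph = pydot__tree_to_graph(tree)     # or get the pydot graph itself
graph.write_svg("tree.svg")
```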
diff --git a/vanilla/signal.py b/vanilla/signal.py
index <HASH>..<HASH> 100644
--- a/vanilla/signal.py
+++ b/vanilla/signal.py
@@ -27,7 +27,8 @@ class __plugin__(object):
self.start()
def handler(sig, frame):
- self.p.send(chr(sig))
+ if self.p:
+ self.p.send(chr(sig))
self.mapper[sig] = self.hub.broadcast()
self.mapper[sig].onempty(self.uncapture, sig)
|
Attempt at a fix to gracefully handle shutdown: sometimes p can be None during shutdown while signals are still incoming.
|
py
|
diff --git a/tangelo/tangelo/__init__.py b/tangelo/tangelo/__init__.py
index <HASH>..<HASH> 100644
--- a/tangelo/tangelo/__init__.py
+++ b/tangelo/tangelo/__init__.py
@@ -16,7 +16,7 @@ def content_type(t=None):
def header(h, t=None):
- r = cherrypy.response.headers[h]
+ r = cherrypy.response.headers.get(h, None)
if t is not None:
cherrypy.response.headers[h] = t
|
changed header function to allow addition of custom headers
|
py
|
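The change swaps a KeyError-raising lookup for `.get`, so reading an absent header returns None while still allowing a new header to be set. A minimal dict-based sketch of the pattern (an illustrative stand-in for cherrypy.response.headers):

```python
headers = {}

def header(h, t=None):
    r = headers.get(h, None)   # no KeyError when the header is absent
    if t is not None:
        headers[h] = t         # setting a new custom header now works
    return r

assert header('X-Custom') is None
header('X-Custom', 'yes')
assert header('X-Custom') == 'yes'
```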
diff --git a/muda/deformers/background.py b/muda/deformers/background.py
index <HASH>..<HASH> 100644
--- a/muda/deformers/background.py
+++ b/muda/deformers/background.py
@@ -39,7 +39,7 @@ def sample_clip_indices(filename, n_samples, sr):
with psf.SoundFile(str(filename), mode='r') as soundf:
- n_target = int(np.ceil(n_samples * soundf.samplerate / sr))
+ n_target = int(np.ceil(n_samples * soundf.samplerate / float(sr)))
# Draw a random clip
start = np.random.randint(0, len(soundf) - n_target)
|
Cast `sr` to float for legacy python
|
py
|
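The cast guards against Python 2 truncating `/` between two ints, which would collapse the computed clip length. A standalone sketch with illustrative sample numbers:

```python
import numpy as np

n_samples, samplerate, sr = 999, 22050, 44100
# In Python 2, samplerate / sr with two ints truncates (here to 499 before
# the ceil); forcing float division keeps the ratio fractional, giving 500.
n_target = int(np.ceil(n_samples * samplerate / float(sr)))
assert n_target == 500
```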
diff --git a/netmiko/ssh_dispatcher.py b/netmiko/ssh_dispatcher.py
index <HASH>..<HASH> 100644
--- a/netmiko/ssh_dispatcher.py
+++ b/netmiko/ssh_dispatcher.py
@@ -19,6 +19,7 @@ CLASS_MAPPER = {
'cisco_nxos' : CiscoNxosSSH,
'cisco_xr' : CiscoXrSSH,
'cisco_wlc_ssh' : CiscoWlcSSH,
+ 'cisco_wlc' : CiscoWlcSSH,
'arista_eos' : AristaSSH,
'hp_procurve' : HPProcurveSSH,
'hp_comware' : HPComwareSSH,
|
Fix discrepancy in class mapper naming for cisco_wlc
|
py
|
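With the alias added to CLASS_MAPPER, both spellings select the WLC driver. A hedged usage sketch via netmiko's ConnectHandler factory; host and credentials are placeholders:

```python
from netmiko import ConnectHandler

# Both 'cisco_wlc' and 'cisco_wlc_ssh' now map to CiscoWlcSSH.
conn = ConnectHandler(
    device_type='cisco_wlc',    # previously only 'cisco_wlc_ssh' worked
    host='wlc.example.com',
    username='admin',
    password='secret',
)
print(conn.send_command('show sysinfo'))
```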
diff --git a/py2/h2o.py b/py2/h2o.py
index <HASH>..<HASH> 100644
--- a/py2/h2o.py
+++ b/py2/h2o.py
@@ -15,6 +15,7 @@ def cloud_name():
def init(*args, **kwargs):
global nodes, n0
+ unit_main()
nodes = h2o_bc.init(*args, **kwargs)
# use to save typing?
n0 = nodes[0]
|
After discussing with Kevin, moved h2o.unit_main() into h2o.init().
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -22,9 +22,9 @@ class Mock(MagicMock):
def __getattr__(cls, name):
return Mock()
-MOCK_MODULES = ['gi.repository', 'Gtk', 'GObject', 'sorting', 'choice',
+MOCK_MODULES = ['gi.repository', 'Gtk', 'GObject', 'sorting', 'choice',
'queueing_tool.queues.choice', 'queueing_tool.network.sorting',
- 'scipy', 'numpy', 'numpy.random', 'graph_tool.all']
+ 'scipy', 'numpy', 'numpy.random', 'graph_tool.all', 'graph_tool']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
|
Fixed error on read the docs
|
py
|
diff --git a/phy/cluster/manual/views.py b/phy/cluster/manual/views.py
index <HASH>..<HASH> 100644
--- a/phy/cluster/manual/views.py
+++ b/phy/cluster/manual/views.py
@@ -1396,6 +1396,12 @@ class CorrelogramView(ManualClusteringView):
color=color,
ylim=ylim,
)
+ # Cluster labels.
+ if i == (n_clusters - 1):
+ self[i, j].text(pos=[0., -1.],
+ text=str(cluster_ids[j]),
+ anchor=[0., -1.04],
+ )
def toggle_normalization(self):
"""Change the normalization of the correlograms."""
|
Add cluster labels in correlogram view
|
py
|
diff --git a/appstream/component.py b/appstream/component.py
index <HASH>..<HASH> 100644
--- a/appstream/component.py
+++ b/appstream/component.py
@@ -177,6 +177,8 @@ class Component(object):
xml += ' <summary>%s</summary>\n' % self.summary
if self.developer_name:
xml += ' <developer_name>%s</developer_name>\n' % self.developer_name
+ if self.project_license:
+ xml += ' <project_license>%s</project_license>\n' % self.project_license
if self.description:
xml += ' <description>%s</description>\n' % self.description
for key in self.urls:
|
Write the component project_license when exporting to XML
|
py
|
diff --git a/django_google_maps/fields.py b/django_google_maps/fields.py
index <HASH>..<HASH> 100644
--- a/django_google_maps/fields.py
+++ b/django_google_maps/fields.py
@@ -92,4 +92,11 @@ class GeoLocationField(models.CharField):
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
- return self.get_db_prep_value(value)
\ No newline at end of file
+ return self.get_db_prep_value(value)
+
+try:
+ from south.modelsinspector import add_introspection_rules
+ add_introspection_rules([], ["^django_google_maps\.fields\.GeoLocationField"])
+ add_introspection_rules([], ["^django_google_maps\.fields\.AddressField"])
+except ImportError:
+ pass
\ No newline at end of file
|
added introspection rules for south migrations
|
py
|
diff --git a/normalize/diff.py b/normalize/diff.py
index <HASH>..<HASH> 100644
--- a/normalize/diff.py
+++ b/normalize/diff.py
@@ -178,8 +178,8 @@ def compare_record_iter(a, b, fs_a=None, fs_b=None, options=None):
)
elif (options.duck_type or type(propval_a) == type(propval_b)) \
and isinstance(propval_a, COMPARABLE):
- for types, func in COMPARE_FUNCTIONS.iteritems():
- if isinstance(propval_a, types):
+ for type_union, func in COMPARE_FUNCTIONS.iteritems():
+ if isinstance(propval_a, type_union):
for diff in func(
propval_a, propval_b, fs_a + [propname],
fs_b + [propname], options,
@@ -438,8 +438,8 @@ def diff_iter(base, other, options=None, **kwargs):
)
generators = []
- for types, func in COMPARE_FUNCTIONS.iteritems():
- if isinstance(base, types):
+ for type_union, func in COMPARE_FUNCTIONS.iteritems():
+ if isinstance(base, type_union):
generators.append(func(base, other, options=options))
if len(generators) == 1:
|
Fix a pyflakes error in diff. Pyflakes protests: normalize/diff.py:<I>: import 'types' from line 7 shadowed by loop variable; normalize/diff.py:<I>: import 'types' from line 7 shadowed by loop variable. Fix it.
|
py
|
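The warning comes from a loop variable reusing the name of the stdlib `types` import. A minimal reproduction of the shadowing, with the rename the patch applies:

```python
import types

COMPARE_FUNCTIONS = {(int, float): max}

# Bad: the loop variable rebinds the module name 'types' in this scope.
for types, func in COMPARE_FUNCTIONS.items():   # pyflakes: import shadowed
    pass

# Good: rename the loop variable, as the patch does with 'type_union'.
for type_union, func in COMPARE_FUNCTIONS.items():
    if isinstance(3, type_union):
        print(func(3, 4.5))   # prints 4.5
```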
diff --git a/wandb/wandb_torch.py b/wandb/wandb_torch.py
index <HASH>..<HASH> 100644
--- a/wandb/wandb_torch.py
+++ b/wandb/wandb_torch.py
@@ -253,8 +253,10 @@ class TorchGraph(wandb.data_types.Graph):
graph.nodes_by_id[id(param)] = node
graph.add_node(node)
if not graph.criterion_passed:
- if hasattr(output[0], 'grad_fn'):
+ if hasattr(output[0], 'grad_fn'):
graph.criterion = output[0].grad_fn
+ elif isinstance(output[0], list) and hasattr(output[0][0], 'grad_fn'):
+ graph.criterion = output[0][0].grad_fn
return after_forward_hook
def hook_torch_modules(self, module, criterion=None, prefix=None, graph_idx=0):
@@ -449,3 +451,4 @@ class TorchGraph(wandb.data_types.Graph):
node.class_name = type(module).__name__
return node
+
|
fix(wandb_torch): extract graph.criterion for RNN models
|
py
|
diff --git a/salt/modules/boto_vpc.py b/salt/modules/boto_vpc.py
index <HASH>..<HASH> 100644
--- a/salt/modules/boto_vpc.py
+++ b/salt/modules/boto_vpc.py
@@ -281,7 +281,8 @@ def delete(vpc_id=None, name=None, tags=None, region=None, key=None, keyid=None,
try:
if not vpc_id:
- vpc_id = get_id(name=name, tags=tags)
+ vpc_id = get_id(name=name, tags=tags, region=region, key=key,
+ keyid=keyid, profile=profile)
if conn.delete_vpc(vpc_id):
log.info('VPC {0} was deleted.'.format(vpc_id))
|
Fixes boto VPC delete by passing connection details
|
py
|
diff --git a/mautrix/api/http.py b/mautrix/api/http.py
index <HASH>..<HASH> 100644
--- a/mautrix/api/http.py
+++ b/mautrix/api/http.py
@@ -76,6 +76,8 @@ class PathBuilder:
Args:
append: The string to append.
"""
+ if append is None:
+ return self
return PathBuilder(self.path + append)
def __eq__(self, other: Union['PathBuilder', str]) -> bool:
@@ -86,6 +88,8 @@ class PathBuilder:
return urllib_quote(string, safe="")
def __getitem__(self, append: Union[str, int]) -> 'PathBuilder':
+ if append is None:
+ return self
return PathBuilder(f"{self.path}/{self._quote(str(append))}")
@@ -162,7 +166,7 @@ class HTTPAPI:
async def request(self, method: Method, path: PathBuilder,
content: Optional[Union[JSON, bytes, str]] = None,
headers: Optional[Dict[str, str]] = None,
- query_params: Optional[Dict[str, str]] = None) -> Awaitable[JSON]:
+ query_params: Optional[Dict[str, str]] = None) -> JSON:
"""
Make a raw HTTP request.
|
Fix PathBuilder.__getitem__ with None append
|
py
|
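The guard makes `path[None]` and `path + None` no-ops that return the same builder, so optional URL segments can be chained unconditionally. A stripped-down standalone sketch of the idea, not the full mautrix class:

```python
class PathBuilder:
    """Stripped-down sketch of the None-tolerant URL path builder."""

    def __init__(self, path: str = "") -> None:
        self.path = path

    def __add__(self, append):
        if append is None:      # appending nothing keeps the builder as-is
            return self
        return PathBuilder(self.path + append)

    def __getitem__(self, append):
        if append is None:      # optional segments can be passed straight in
            return self
        return PathBuilder(f"{self.path}/{append}")

room_id = None  # an optional segment
url = PathBuilder("/_matrix/client/r0")["rooms"][room_id]
assert url.path == "/_matrix/client/r0/rooms"
```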
diff --git a/dukpy/install.py b/dukpy/install.py
index <HASH>..<HASH> 100644
--- a/dukpy/install.py
+++ b/dukpy/install.py
@@ -78,8 +78,9 @@ def install_jspackage(package_name, version, modulesdir):
tmpdir = tempfile.mkdtemp()
try:
tb.extractall(tmpdir)
+ dirname = os.listdir(tmpdir)[0]
shutil.rmtree(os.path.abspath(dest), ignore_errors=True)
- shutil.move(os.path.join(tmpdir, 'package'),
+ shutil.move(os.path.join(tmpdir, dirname),
os.path.abspath(dest))
finally:
shutil.rmtree(tmpdir)
|
Support packages that don't contain a 'package' directory
|
py
|
diff --git a/odinweb/decorators.py b/odinweb/decorators.py
index <HASH>..<HASH> 100644
--- a/odinweb/decorators.py
+++ b/odinweb/decorators.py
@@ -175,7 +175,7 @@ class Operation(object):
raise HttpError(HTTPStatus.BAD_REQUEST, 40099, "Unable to decode request body.", str(ude))
try:
- resource = request.request_codec.loads(body, resource=self.resource, full_clean=False)
+ resource = request.request_codec.loads(body, resource=self.resource, full_clean=True)
except ValueError as ve:
raise HttpError(HTTPStatus.BAD_REQUEST, 40098, "Unable to load resource.", str(ve))
|
Ensure a full clean is performed
|
py
|
diff --git a/pybotvac/robot.py b/pybotvac/robot.py
index <HASH>..<HASH> 100644
--- a/pybotvac/robot.py
+++ b/pybotvac/robot.py
@@ -66,9 +66,9 @@ class Robot:
# navigation_mode: 1 normal, 2 extra care, 3 deep
# category: 2 non-persistent map, 4 persistent map
- #Default to using the persistent map if we support basic-3.
+ # Default to using the persistent map if we support basic-3 or basic-4.
if category is None:
- category = 4 if self.service_version == 'basic-3' else 2
+ category = 4 if self.service_version in ['basic-3', 'basic-4'] else 2
if self.service_version == 'basic-1':
json = {'reqId': "1",
|
Default to category 4 (persistent map) for basic-3 and basic-4
|
py
|
diff --git a/lib/pysynphot/observation.py b/lib/pysynphot/observation.py
index <HASH>..<HASH> 100644
--- a/lib/pysynphot/observation.py
+++ b/lib/pysynphot/observation.py
@@ -84,13 +84,13 @@ class Observation(spectrum.CompositeSourceSpectrum):
self.spectrum=self.spectrum.taper()
except AttributeError:
self.spectrum=self.spectrum.tabulate().taper()
- self.warnings['PartialOverlap']=True
+ self.warnings['PartialOverlap']=force
elif force.lower().startswith('extrap'):
#default behavior works, but check the overlap so we can set the warning
stat=self.bandpass.check_overlap(self.spectrum)
if stat == 'partial':
- self.warnings['PartialOverlap']=True
+ self.warnings['PartialOverlap']=force
else:
raise(KeyError("Illegal value force=%s; legal values=('taper','extrap')"%force))
@@ -211,9 +211,15 @@ class Observation(spectrum.CompositeSourceSpectrum):
# (self.spectrum*self.bandpass) * other
#
def __mul__(self, other):
+ # If the original object has partial overlap warnings, then
+ # the forcing behavior also needs to be propagated.
+
+ force = self.warnings.get('PartialOverlap', None)
+
result = Observation(self.spectrum,
- self.bandpass * other,
- binset=self.binset
+ self.bandpass * other,
+ binset=self.binset
+ force=force
)
return result
|
Change contents of PartialOverlap warning in Observation.warnings to preserve the value of the force keyword that was used, so that it can be used if the Observation needs to be remade (as in Observation.__mul__ for scalar multiplication). Needed for pyetc. git-svn-id: <URL>
|
py
|
diff --git a/crispy_forms/templatetags/crispy_forms_field.py b/crispy_forms/templatetags/crispy_forms_field.py
index <HASH>..<HASH> 100644
--- a/crispy_forms/templatetags/crispy_forms_field.py
+++ b/crispy_forms/templatetags/crispy_forms_field.py
@@ -1,6 +1,6 @@
from django import forms, template
from django.conf import settings
-from django.template import Context, loader
+from django.template import Context, Variable, loader
from crispy_forms.utils import get_template_pack
@@ -81,7 +81,7 @@ class CrispyFieldNode(template.Node):
# variables in the current rendering context first
if self not in context.render_context:
context.render_context[self] = (
- template.Variable(self.field),
+ Variable(self.field),
self.attrs,
)
@@ -110,8 +110,8 @@ class CrispyFieldNode(template.Node):
widget.attrs["class"] = css_class
for attribute_name, attribute in attr.items():
- attribute_name = template.Variable(attribute_name).resolve(context)
- attributes = template.Variable(attribute).resolve(context)
+ attribute_name = Variable(attribute_name).resolve(context)
+ attributes = Variable(attribute).resolve(context)
if attribute_name in widget.attrs:
# multiple attribtes are in a single string, e.g.
|
Import `Variable` directly as it is more efficient in a loop. (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -21,10 +21,13 @@ from setuptools import setup, Extension
from setuptools import find_packages
from setuptools.command.test import test as TestCommand
-
-# Utility function to read the README file.
-def read(fname):
- return open(os.path.join(os.path.dirname(__file__), fname)).read()
+# Convert Markdown to RST for PyPI
+# http://stackoverflow.com/a/26737672
+try:
+ import pypandoc
+ long_description = pypandoc.convert('README.md', 'rst')
+except (IOError, ImportError, OSError):
+ long_description = open('README.md').read()
class PyTest(TestCommand):
@@ -75,7 +78,7 @@ setup(
keywords=["ahl", "keyvalue", "tickstore", "mongo", "timeseries", ],
url="https://github.com/manahl/arctic",
packages=find_packages(),
- long_description="", # read('README'),
+ long_description=long_description,
cmdclass={'test': PyTest},
ext_modules=[compress],
setup_requires=["setuptools_cython",
|
Adding the long_description for PyPI
|
py
|
diff --git a/ape/container_mode/tasks.py b/ape/container_mode/tasks.py
index <HASH>..<HASH> 100644
--- a/ape/container_mode/tasks.py
+++ b/ape/container_mode/tasks.py
@@ -156,6 +156,20 @@ def install_container(container_name):
else:
print 'ERROR: this container does not provide an install.py!'
return
+
+ tasks.post_install_container()
+
[email protected]_helper
+def post_install_container():
+ '''
+ Refine this task to perform feature specific installations after the container
+ and its virtualenv have been installed. E.g. djpl-postgres refines this task
+ to link psycopg
+ '''
+ pass
+
+
+
@tasks.register_helper
def get_extra_pypath(container_name=None):
|
added post_install_container hook
|
py
|
diff --git a/optlang/gurobi_interface.py b/optlang/gurobi_interface.py
index <HASH>..<HASH> 100644
--- a/optlang/gurobi_interface.py
+++ b/optlang/gurobi_interface.py
@@ -126,7 +126,8 @@ class Variable(interface.Variable):
if value is None:
value = -gurobipy.GRB.INFINITY
if self.problem:
- return self._internal_variable.setAttr('LB', value)
+ self._internal_variable.setAttr('LB', value)
+ self.problem.problem.update()
@interface.Variable.ub.setter
def ub(self, value):
@@ -134,7 +135,8 @@ class Variable(interface.Variable):
if value is None:
value = gurobipy.GRB.INFINITY
if self.problem:
- return self._internal_variable.setAttr('UB', value)
+ self._internal_variable.setAttr('UB', value)
+ self.problem.problem.update()
def set_bounds(self, lb, ub):
super(Variable, self).set_bounds(lb, ub)
|
fix: update gurobi after changing bounds
|
py
|
diff --git a/perceval/_version.py b/perceval/_version.py
index <HASH>..<HASH> 100644
--- a/perceval/_version.py
+++ b/perceval/_version.py
@@ -1,2 +1,2 @@
# Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440
-__version__ = "0.9.3"
+__version__ = "0.9.4"
|
Update version number to <I>
|
py
|
diff --git a/owncloud/owncloud.py b/owncloud/owncloud.py
index <HASH>..<HASH> 100644
--- a/owncloud/owncloud.py
+++ b/owncloud/owncloud.py
@@ -748,6 +748,29 @@ class Client():
raise ResponseError(res)
+ def search_users(self, user_name):
+ """Searches for users via provisioning API.
+ If you get back an error 999, then the provisioning API is not enabled.
+
+ :param user_name: name of user to be searched for
+ :returns: list of users
+ :raises: ResponseError in case an HTTP error status was returned
+
+ """
+ res = self.__make_ocs_request(
+ 'GET',
+ self.OCS_SERVICE_CLOUD,
+ 'users?search=' + user_name
+ )
+
+ if res.status_code == 200:
+ tree = ET.fromstring(res.text)
+ users = tree.find('data/users')
+
+ return users
+
+ raise ResponseError(res)
+
def add_user_to_group(self, user_name, group_name):
"""Adds a user to a group.
|
added search_users function. Can be used for searching users or listing all users
|
py
|
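A hedged usage sketch of the new method, assuming pyocclient's usual Client/login flow; the server URL and credentials are placeholders:

```python
import owncloud

oc = owncloud.Client('https://cloud.example.com')
oc.login('admin', 'secret')

# Search by name fragment; an empty string should list all users.
# Requires the provisioning API to be enabled, else error 999.
for user in oc.search_users('ali'):
    print(user.text)
```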
diff --git a/test/python/providers/test_backendconfiguration.py b/test/python/providers/test_backendconfiguration.py
index <HASH>..<HASH> 100644
--- a/test/python/providers/test_backendconfiguration.py
+++ b/test/python/providers/test_backendconfiguration.py
@@ -156,7 +156,4 @@ class TestBackendConfiguration(QiskitTestCase):
def test_deepcopy(self):
"""Ensure that a deepcopy succeeds and results in an identical object."""
copy_config = copy.deepcopy(self.config)
- print(copy_config.to_dict())
- print("Original:")
- print(self.config.to_dict())
self.assertEqual(copy_config, self.config)
|
Remove stray print from test_backendconfiguration (#<I>). In #<I> a regression test was added to validate the bugfix being added in that PR. However, a stray debug print from development slipped in and has stuck around since that merged. This commit removes the stray print from our test output since it serves no purpose.
|
py
|
diff --git a/paramiko/sftp_client.py b/paramiko/sftp_client.py
index <HASH>..<HASH> 100644
--- a/paramiko/sftp_client.py
+++ b/paramiko/sftp_client.py
@@ -72,9 +72,6 @@ class SFTPClient (BaseSFTP):
self.ultra_debug = transport.get_hexdump()
server_version = self._send_version()
self._log(INFO, 'Opened sftp connection (server version %d)' % server_version)
-
- def __del__(self):
- self.close()
def from_transport(cls, t):
"""
|
[project @ <EMAIL><I>-e7a1fccdcc4e<I>c] this del is also useless
|
py
|
diff --git a/monero_serialize/xmrrpc.py b/monero_serialize/xmrrpc.py
index <HASH>..<HASH> 100644
--- a/monero_serialize/xmrrpc.py
+++ b/monero_serialize/xmrrpc.py
@@ -1319,6 +1319,7 @@ class Modeler(object):
if x.is_type(elem_type, BlobFieldWrapper):
blobber = Blobber(self.writing, data=x.get_elem(src) if not self.writing else None)
fvalue = await blobber.blobize(elem=x.get_elem(src), elem_type=elem_type.ftype, params=params)
+ fvalue = NoSetSentinel() if fvalue is None or len(fvalue) == 0 else fvalue
elif issubclass(elem_type, x.UVarintType):
fvalue = await self.uvarint(x.get_elem(src))
|
xmrrpc: no set sentinel for blob
|
py
|
diff --git a/test/test_validate.py b/test/test_validate.py
index <HASH>..<HASH> 100644
--- a/test/test_validate.py
+++ b/test/test_validate.py
@@ -35,6 +35,7 @@ def test_validate_tpm_conditional_independence():
validate.conditionally_independent(tpm)
with pytest.raises(ValueError):
validate.tpm(tpm)
+ validate.tpm(tpm, check_independence=False)
def test_validate_cm_valid(s):
|
Update `validate.tpm` test
|
py
|
diff --git a/mysql/toolkit/script/prepare.py b/mysql/toolkit/script/prepare.py
index <HASH>..<HASH> 100644
--- a/mysql/toolkit/script/prepare.py
+++ b/mysql/toolkit/script/prepare.py
@@ -33,6 +33,9 @@ class PrepareSQL:
self._add_semicolon = add_semicolon
self._invalid_starts = invalid_starts
+ def __str__(self):
+ return self.prepared
+
@property
def prepared(self):
results = StringIO()
|
Added __str__ magic method
|
py
|
diff --git a/phypno/detect/spindle.py b/phypno/detect/spindle.py
index <HASH>..<HASH> 100644
--- a/phypno/detect/spindle.py
+++ b/phypno/detect/spindle.py
@@ -289,10 +289,11 @@ def _find_peak_in_fft(data, peak_in_s, chan, fft_window_length):
beg_fft = peak_in_smp - data.s_freq * fft_window_length / 2
end_fft = peak_in_smp + data.s_freq * fft_window_length / 2
- time_for_fft = data.axis['time'][0][beg_fft:end_fft]
- if len(time_for_fft) == 0:
+ if beg_fft < 0 or end_fft > data.number_of('time')[0]:
return None
+ time_for_fft = data.axis['time'][0][beg_fft:end_fft]
+
x = data(trial=TRIAL, chan=chan, time=time_for_fft)
f, Pxx = welch(x, data.s_freq, nperseg=data.s_freq)
|
only compute the fft if the window is within boundaries
|
py
|
diff --git a/pysc2/env/sc2_env.py b/pysc2/env/sc2_env.py
index <HASH>..<HASH> 100644
--- a/pysc2/env/sc2_env.py
+++ b/pysc2/env/sc2_env.py
@@ -643,7 +643,7 @@ class SC2Env(environment.Base):
if (game_loop < target_game_loop and
not any(o.player_result for o in self._obs)):
raise ValueError("The game didn't advance to the expected game loop")
- elif game_loop > target_game_loop:
+ elif game_loop > target_game_loop and target_game_loop > 0:
logging.warn("Received observation %d step(s) late: %d rather than %d.",
game_loop - target_game_loop, game_loop, target_game_loop)
|
Skip the warning that happens at the beginning of every game. PiperOrigin-RevId: <I>
|
py
|
diff --git a/derpconf/version.py b/derpconf/version.py
index <HASH>..<HASH> 100644
--- a/derpconf/version.py
+++ b/derpconf/version.py
@@ -8,4 +8,4 @@
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2012 globo.com [email protected]
-__version__ = "0.7.1"
+__version__ = "0.7.2"
|
Release <I> - considering class defaults as well
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup
setup(
name = 'gophish',
packages = ['gophish', 'gophish.api'],
- version = '0.1.3',
+ version = '0.1.4',
description = 'Python API Client for Gophish',
author = 'Jordan Wright',
author_email = '[email protected]',
|
Bumping release to <I>
|
py
|
diff --git a/bambou/nurest_object.py b/bambou/nurest_object.py
index <HASH>..<HASH> 100644
--- a/bambou/nurest_object.py
+++ b/bambou/nurest_object.py
@@ -660,7 +660,7 @@ class NURESTObject(object):
# HTTP Calls
- def delete(self, async=False, callback=None, response_choice=None):
+ def delete(self, response_choice=None, async=False, callback=None):
""" Delete object and call given callback in case of async call.
Args:
|
Replaced response_choice in delete method
|
py
|
diff --git a/spacy/util.py b/spacy/util.py
index <HASH>..<HASH> 100644
--- a/spacy/util.py
+++ b/spacy/util.py
@@ -417,6 +417,7 @@ def read_json(location):
location (Path): Path to JSON file.
RETURNS (dict): Loaded JSON content.
"""
+ location = ensure_path(location)
with location.open('r', encoding='utf8') as f:
return ujson.load(f)
|
Ensure path on read_json
|
py
|
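The fix coerces a plain string to a Path before calling `.open`, so both input types work. A sketch of the coercion pattern (using stdlib json in place of ujson for a self-contained example):

```python
from pathlib import Path
import json

def ensure_path(location):
    # Sketch of the helper's contract: str becomes Path, Path passes through.
    return Path(location) if isinstance(location, str) else location

def read_json(location):
    location = ensure_path(location)   # now both str and Path inputs work
    with location.open('r', encoding='utf8') as f:
        return json.load(f)
```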
diff --git a/ethereum/tests/hybrid_casper/test_casper.py b/ethereum/tests/hybrid_casper/test_casper.py
index <HASH>..<HASH> 100644
--- a/ethereum/tests/hybrid_casper/test_casper.py
+++ b/ethereum/tests/hybrid_casper/test_casper.py
@@ -13,7 +13,7 @@ for i in range(9):
alloc[utils.int_to_addr(i)] = {'balance': 1}
alloc[t.a0] = {'balance': 10**22}
alloc[t.a1] = {'balance': 10**22}
-genesis = casper_utils.make_casper_genesis(alloc, 5, 100, 0.02, 0.02)
+genesis = casper_utils.make_casper_genesis(alloc, 5, 100, 0.02, 0.002)
c = t.Chain(genesis=genesis)
t.languages['viper'] = compiler.Compiler()
t.gas_limit = 9999999
|
Fix small bug which broke casper tests
|
py
|
diff --git a/tests/test_test.py b/tests/test_test.py
index <HASH>..<HASH> 100644
--- a/tests/test_test.py
+++ b/tests/test_test.py
@@ -308,6 +308,22 @@ def test_follow_redirect():
strict_eq(resp.status_code, 200)
strict_eq(resp.data, b'current url: http://localhost/some/redirect/')
+def test_follow_redirect_with_post_307():
+ def redirect_with_post_307_app(environ, start_response):
+ req = Request(environ)
+ if req.url == 'http://localhost/some/redirect/':
+ assert req.method == 'POST', 'request should be POST'
+ assert not req.form, 'request should not have data'
+ response = Response('current url: %s' % req.url)
+ else:
+ response = redirect('http://localhost/some/redirect/', code=307)
+ return response(environ, start_response)
+
+ c = Client(redirect_with_post_307_app, response_wrapper=BaseResponse)
+ resp = c.post('/', follow_redirects=True, data='foo=blub+hehe&blah=42')
+ assert resp.status_code == 200
+ assert resp.data == b'current url: http://localhost/some/redirect/'
+
def test_follow_external_redirect():
env = create_environ('/', base_url='http://localhost')
c = Client(external_redirect_demo_app)
|
Added unit tests for the <I> redirect
|
py
|
diff --git a/example/setup.py b/example/setup.py
index <HASH>..<HASH> 100755
--- a/example/setup.py
+++ b/example/setup.py
@@ -1,12 +1,11 @@
#!/usr/bin/env python
from distutils.core import setup
-from distutils.extension import Extension
from Cython.Build import cythonize
-
+import sys
# Run Cython
-extensions=cythonize("cysignals_example.pyx")
+extensions=cythonize("cysignals_example.pyx", include_path=sys.path)
# Run Distutils
setup(
|
make example work without cython/cython#<I>
|
py
|
diff --git a/spinoff/actor/comm.py b/spinoff/actor/comm.py
index <HASH>..<HASH> 100644
--- a/spinoff/actor/comm.py
+++ b/spinoff/actor/comm.py
@@ -163,7 +163,7 @@ class Comm(BaseActor):
lambda _: self._outgoing_sock.send((node_addr, pickle.dumps((actor_id, msg)))))
def ensure_connected(self, to):
- if isinstance(self._outgoing_sock, ZmqRouter) and not self._zmq_is_connected(to=to):
+ if not self._mock_sock and not self._zmq_is_connected(to=to):
self._outgoing_sock.add_endpoints([('connect', to)])
self._connections.add(to)
return sleep(0.005)
|
A subtle semantic clean-up/fix in Comm.ensure_connected--comparing against an actor class does not allow use of refs
|
py
|
diff --git a/stravalib/protocol.py b/stravalib/protocol.py
index <HASH>..<HASH> 100644
--- a/stravalib/protocol.py
+++ b/stravalib/protocol.py
@@ -166,6 +166,10 @@ class ApiV3(object):
raise ValueError("Invalid/unsupported request method specified: {0}".format(method))
raw = requester(url, params=params)
+ # Rate limits are taken from HTTP response headers
+ # https://strava.github.io/api/#rate-limiting
+ self.rate_limiter(raw.headers)
+
if check_for_errors:
self._handle_protocol_error(raw)
@@ -175,12 +179,6 @@ class ApiV3(object):
else:
resp = raw.json()
- # At this stage we should assume that request was successful and we should invoke
- # our rate limiter.
- # The limits are taken from HTTP response headers
- # https://strava.github.io/api/#rate-limiting
- self.rate_limiter(raw.headers)
-
return resp
def _handle_protocol_error(self, response):
|
Invoking rate limiter before checking for errors. There doesn't seem to be a reason for only invoking the rate limiter in case of a <I> OK response. API usage is also counted by Strava for various error responses, so let's register these as well.
|
py
|
diff --git a/gitlab/v4/objects/branches.py b/gitlab/v4/objects/branches.py
index <HASH>..<HASH> 100644
--- a/gitlab/v4/objects/branches.py
+++ b/gitlab/v4/objects/branches.py
@@ -84,5 +84,6 @@ class ProjectProtectedBranchManager(NoUpdateMixin, RESTManager):
"allowed_to_push",
"allowed_to_merge",
"allowed_to_unprotect",
+ "code_owner_approval_required",
),
)
|
feat: add code owner approval as attribute. The Python API was missing the field code_owner_approval_required as implemented in the GitLab REST API.
|
py
|
diff --git a/checkers/exceptions.py b/checkers/exceptions.py
index <HASH>..<HASH> 100644
--- a/checkers/exceptions.py
+++ b/checkers/exceptions.py
@@ -68,7 +68,7 @@ MSGS = {
'Used when except clauses are not in the correct order (from the '
'more specific to the more generic). If you don\'t fix the order, '
'some exceptions may not be catched by the most specific handler.'),
- 'E0702': ('Raising %s while only classes, instances or string are allowed',
+ 'E0702': ('Raising %s while only classes or instances are allowed',
'raising-bad-type',
'Used when something which is neither a class, an instance or a \
string is raised (i.e. a `TypeError` will be raised).'),
|
Amend the message for raising-bad-type, by not specifying strings.
|
py
|
diff --git a/examples/pxe_with_nfs/pxe_with_nfs_infra.py b/examples/pxe_with_nfs/pxe_with_nfs_infra.py
index <HASH>..<HASH> 100644
--- a/examples/pxe_with_nfs/pxe_with_nfs_infra.py
+++ b/examples/pxe_with_nfs/pxe_with_nfs_infra.py
@@ -5,7 +5,7 @@ SUDO = True
# If you change pxe_server value below then check/change Vagrantfile
pxe_server = '192.168.0.240'
-interface = 'eth2'
+interface = 'eth1'
dhcp_start = '192.168.0.220'
dhcp_end = '192.168.0.230'
|
revert a testing value change
|
py
|
diff --git a/alignak/daemons/arbiterdaemon.py b/alignak/daemons/arbiterdaemon.py
index <HASH>..<HASH> 100644
--- a/alignak/daemons/arbiterdaemon.py
+++ b/alignak/daemons/arbiterdaemon.py
@@ -298,8 +298,10 @@ class Arbiter(Daemon): # pylint: disable=R0902
# Maybe conf is already invalid
if not self.conf.conf_is_correct:
- sys.exit("***> One or more problems was encountered "
- "while processing the config files...")
+ err = "Problems encountered while processing the configuration files."
+ logger.error(err)
+ self.conf.show_errors()
+ sys.exit(err)
# Manage all post-conf modules
self.hook_point('early_configuration')
|
Fix-#<I> - dump file parsing errors
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ from setuptools import setup, find_packages
if __name__ == '__main__':
setup(
name='glad',
- version='0.1.7a1',
+ version='0.1.8a0',
description='Multi-Language GL/GLES/EGL/GLX/WGL Loader-Generator based on the official specs.',
long_description=__doc__,
packages=find_packages(),
|
setup: Bumped version: <I>a0.
|
py
|
diff --git a/programs/pmag_gui3.0.py b/programs/pmag_gui3.0.py
index <HASH>..<HASH> 100755
--- a/programs/pmag_gui3.0.py
+++ b/programs/pmag_gui3.0.py
@@ -364,10 +364,22 @@ class MagMainFrame(wx.Frame):
self.Hide()
def on_convert_3(self, event):
- pmag.convert_directory_2_to_3('magic_measurements.txt',
- input_dir=self.WD, output_dir=self.WD)
+ # turn files from 2.5 --> 3.0 (rough translation)
+ res = pmag.convert_directory_2_to_3('magic_measurements.txt',
+ input_dir=self.WD, output_dir=self.WD)
+ if not res:
+ wx.MessageBox('2.5 --> 3.0 failed. Do you have a magic_measurements.txt file in your working directory?',
+ 'Info', wx.OK | wx.ICON_INFORMATION)
+ return
+
+ # create a contribution
self.contribution = nb.Contribution(self.WD)
+ # make skeleton files with specimen, sample, site, location data
self.contribution.propagate_measurement_info()
+ # pop up
+ wx.MessageBox('2.5 --> 3.0 translation completed!', 'Info',
+ wx.OK | wx.ICON_INFORMATION)
+
def on_er_data(self, event):
|
Pmag GUI 3 step 1a: give success/fail info
|
py
|
diff --git a/pullv/repo/git.py b/pullv/repo/git.py
index <HASH>..<HASH> 100644
--- a/pullv/repo/git.py
+++ b/pullv/repo/git.py
@@ -32,12 +32,26 @@ class GitRepo(BaseRepo):
def obtain(self):
self.check_destination()
+ import subprocess
+ import sys
url, rev = self.get_url_rev()
- proc = run(
- ['git', 'clone', '-q', url, self['path']],
+ self.info('Cloning')
+ process = subprocess.Popen(
+ ['git', 'clone', '--progress', url, self['path']],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
env=os.environ.copy(), cwd=self['path']
)
+ while True:
+ err = process.stderr.read(1)
+ if err == '' and process.poll() is not None:
+ break
+ if err != '':
+ sys.stderr.write(err)
+ sys.stderr.flush()
+
+ self.info('Cloned\n\t%s' % (process.stdout.read()))
def update_repo(self):
self.check_destination()
|
get progress on git clone
|
py
|
diff --git a/tensorboard/plugins/interactive_inference/utils/inference_utils.py b/tensorboard/plugins/interactive_inference/utils/inference_utils.py
index <HASH>..<HASH> 100644
--- a/tensorboard/plugins/interactive_inference/utils/inference_utils.py
+++ b/tensorboard/plugins/interactive_inference/utils/inference_utils.py
@@ -263,7 +263,7 @@ def get_numeric_feature_names(example):
example: An example.
Returns:
- A list of string feature names.
+ A list of strings of the names of numeric features.
"""
numeric_features = ('float_list', 'int64_list')
features = get_example_features(example)
@@ -495,7 +495,7 @@ def make_json_formatted_for_single_chart(mutant_features,
X-axis.
inference_result_proto: A ClassificationResponse or RegressionResponse
returned by Servo, representing the Y-axis.
- It contains one 'classification' or 'regression' for everyExample that
+ It contains one 'classification' or 'regression' for every Example that
was sent for inference. The length of that field should be the same length
of mutant_features.
index_to_mutate: The index of the feature being mutated for this chart.
|
Fixed typos in docstrings. (#<I>) * Fixed typos in docstrings. * Clarified wording.
|
py
|
diff --git a/flask_login.py b/flask_login.py
index <HASH>..<HASH> 100644
--- a/flask_login.py
+++ b/flask_login.py
@@ -77,6 +77,10 @@ ID_ATTRIBUTE = 'get_id'
#: Default name of the auth header (``Authorization``)
AUTH_HEADER_NAME = 'Authorization'
+# A set of session keys that are populated by Flask-Login. Use this set to
+# purge keys safely and accurately.
+SESSION_KEYS = {'user_id', '_id', '_fresh'}
+
class LoginManager(object):
'''
@@ -400,9 +404,9 @@ class LoginManager(object):
session_protected.send(app)
return False
elif mode == 'strong':
- sess.pop('user_id', None)
- sess.pop('_id', None)
- sess.pop('_fresh', None)
+ for k in SESSION_KEYS:
+ sess.pop(k, None)
+
sess['remember'] = 'clear'
session_protected.send(app)
return True
|
use a set to contain flask-login populated keys
|
py
|
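Using one module-level set keeps the 'strong' session-protection purge and any future purge sites in sync. A minimal sketch of the purge over a plain dict:

```python
# The set of session keys Flask-Login populates (from the patch).
SESSION_KEYS = {'user_id', '_id', '_fresh'}

def purge_login_keys(sess):
    # pop with a default never raises, even if a key was never set
    for key in SESSION_KEYS:
        sess.pop(key, None)

sess = {'user_id': '42', '_fresh': True, 'cart': ['x']}
purge_login_keys(sess)
assert sess == {'cart': ['x']}
```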
diff --git a/telethon/update_state.py b/telethon/update_state.py
index <HASH>..<HASH> 100644
--- a/telethon/update_state.py
+++ b/telethon/update_state.py
@@ -80,6 +80,7 @@ class UpdateState:
t.join()
self._worker_threads.clear()
+ self._workers = n
def setup_workers(self):
if self._worker_threads or not self._workers:
|
Stopping workers should not clear their count (may fix #<I>)
|
py
|
diff --git a/fc/excel_ui.py b/fc/excel_ui.py
index <HASH>..<HASH> 100644
--- a/fc/excel_ui.py
+++ b/fc/excel_ui.py
@@ -648,9 +648,13 @@ def generate_histograms_table(samples_table, samples):
for sample_id, sample in zip(samples_table.index, samples):
for header, channel in zip(hist_headers, hist_channels):
if pd.notnull(samples_table[header][sample_id]):
+ # Get units in which bins are being reported
+ unit = samples_table[header][sample_id]
# Store bins
bins = sample[:,channel].channel_info[0]['bin_vals']
- hist_table.loc[(sample_id, channel, 'Bins'),
+ hist_table.loc[(sample_id,
+ channel,
+ 'Bin Values ({})'.format(unit)),
columns[0:len(bins)]] = bins
# Calculate and store histogram counts
bin_edges = sample[:,channel].channel_info[0]['bin_edges']
|
Histograms produced by excel_ui now show units for each bin.
|
py
|
diff --git a/spyderlib/spyder.py b/spyderlib/spyder.py
index <HASH>..<HASH> 100644
--- a/spyderlib/spyder.py
+++ b/spyderlib/spyder.py
@@ -1585,6 +1585,16 @@ Please provide any additional information below.
_("Unable to connect to IPython kernel "
"<b>`%s`") % cf)
return
+
+ # Tabify the first frontend to the external console plugin, tabify
+ # the next ones to the last created frontend:
+ if self.ipython_frontends:
+ other = self.ipython_frontends[-1]
+ else:
+ other= self.extconsole
+ self.tabifyDockWidget(other.dockwidget, ipython_plugin.dockwidget)
+ ipython_plugin.switch_to_plugin()
+
self.ipython_frontends.append(ipython_plugin)
def get_ipython_widget(self, kernel_widget):
|
IPython plugin: when a new kernel is started in the Console, tabify the first created frontend to the Console and subsequent frontends to the previously created frontend.
|
py
|
diff --git a/lp_copy_packages.py b/lp_copy_packages.py
index <HASH>..<HASH> 100755
--- a/lp_copy_packages.py
+++ b/lp_copy_packages.py
@@ -63,11 +63,19 @@ def get_archives(lp, to_archive_name):
def copy_packages(lp, version, to_archive_name, dry_run=False):
"""Copy the juju-core source and binary packages to and archive."""
from_archive, to_archive = get_archives(lp, to_archive_name)
+ # Look for juju-core2 first.
package_histories = from_archive.getPublishedSources(
- source_name='juju-core', status='Published')
+ source_name='juju-core2', status='Published')
package_histories = [
package for package in package_histories
if package.source_package_version.startswith(version)]
+ # Look for juju-core second.
+ if len(package_histories) == 0:
+ package_histories = from_archive.getPublishedSources(
+ source_name='juju-core', status='Published')
+ package_histories = [
+ package for package in package_histories
+ if package.source_package_version.startswith(version)]
if len(package_histories) == 0:
raise ValueError(
'No packages matching {} were found in {} to copy to {}.'.format(
|
Copy juju-core2 packages to public archives.
|
py
|
diff --git a/lenstronomy/Util/class_creator.py b/lenstronomy/Util/class_creator.py
index <HASH>..<HASH> 100644
--- a/lenstronomy/Util/class_creator.py
+++ b/lenstronomy/Util/class_creator.py
@@ -144,11 +144,10 @@ def create_class_instances(lens_model_list=[], z_lens=None, z_source=None, lens_
def create_image_model(kwargs_data, kwargs_psf, kwargs_numerics, kwargs_model, likelihood_mask=None):
"""
- :param kwargs_data:
- :param kwargs_psf:
- :param kwargs_model:
- :param kwargs_model_indexes:
- :return:
+ :param kwargs_data: ImageData keyword arguments
+ :param kwargs_psf: PSF keyword arguments
+ :param kwargs_model: model keyword arguments
+ :return: ImageLinearFit() instance
"""
data_class = ImageData(**kwargs_data)
psf_class = PSF(**kwargs_psf)
|
added option to define R_sersic convention in elliptical case through the model settings
|
py
|
diff --git a/tldap/query.py b/tldap/query.py
index <HASH>..<HASH> 100644
--- a/tldap/query.py
+++ b/tldap/query.py
@@ -254,6 +254,7 @@ class QuerySet(object):
def _expand_query(self, q):
dst = tldap.Q()
dst.connector = q.connector
+ dst.negated = q.negated
"""
Expands exandable q items, i.e. for relations between objects.
@@ -333,9 +334,6 @@ class QuerySet(object):
if len(dst.children)==0:
# no search terms, all terms were None
return None
- elif len(dst.children)==1 and isinstance(dst.children[0], django.utils.tree.Node) and not dst.negated:
- # just one non-negative term, return it
- return dst.children[0]
else:
# multiple terms
return dst
|
More problems fixed with negated searches.
|
py
|
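Why the dropped collapse branch mattered: a toy Q-tree (not tldap's real classes) shows that flattening a single-child node while ignoring its negated flag inverts the resulting LDAP filter:

class Node:
    def __init__(self, children, connector='AND', negated=False):
        self.children = children
        self.connector = connector
        self.negated = negated

def as_filter(node):
    if isinstance(node, str):  # leaf term such as "uid=alice"
        return '(%s)' % node
    op = '&' if node.connector == 'AND' else '|'
    result = '(%s%s)' % (op, ''.join(as_filter(c) for c in node.children))
    return '(!%s)' % result if node.negated else result

q = Node(['uid=alice'], negated=True)
print(as_filter(q))  # (!(&(uid=alice)))
# Returning q.children[0] instead would yield (uid=alice): the exact
# opposite of the query the caller asked for.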
diff --git a/functionfs/tests/device.py b/functionfs/tests/device.py
index <HASH>..<HASH> 100644
--- a/functionfs/tests/device.py
+++ b/functionfs/tests/device.py
@@ -154,7 +154,10 @@ class FunctionFSTestDevice(functionfs.Function):
def onEnable(self):
print('functionfs: ENABLE')
- print('Real interface 0:', self.ep0.getRealInterfaceNumber(0))
+ try:
+ print('Real interface 0:', self.ep0.getRealInterfaceNumber(0))
+ except IOError:
+ pass
for ep_file in self._ep_list[1:]:
print(ep_file.name + ':')
descriptor = ep_file.getDescriptor()
|
tests: Tolerate getRealInterfaceNumber raising.
|
py
|
diff --git a/tests/base_class.py b/tests/base_class.py
index <HASH>..<HASH> 100644
--- a/tests/base_class.py
+++ b/tests/base_class.py
@@ -56,7 +56,7 @@ class BaseTestAreas(object):
# try to get a single area by id not available in the result
with pytest.raises(overpy.exception.DataIncomplete):
- result.get_node(123456)
+ result.get_area(123456)
        # area_ids is an alias for get_area_ids() and should return the same data
for area_ids in (result.area_ids, result.get_area_ids()):
|
test - Fix small issue in area test
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,13 @@
-from setuptools import setup
+from setuptools import setup, find_packages
setup(name='dispatch',
- version='0.1',
+ version='0.2.7',
description='A publishing platform for modern newspapers',
url='http://github.com/ubyssey/dispatch',
author='Peter Siemens',
author_email='[email protected]',
license='GPL',
- packages=['dispatch'],
+ packages=find_packages(),
scripts=['dispatch/bin/dispatch-admin'],
include_package_data=True,
install_requires=[
|
Find all packages with find_packages()
|
py
|
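find_packages() walks the project tree and returns every package containing an __init__.py; a common refinement (an assumption here, not part of the diff) is to exclude test packages:

from setuptools import find_packages

packages = find_packages(exclude=['tests', 'tests.*'])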
diff --git a/py/cmdline/pycleanup.py b/py/cmdline/pycleanup.py
index <HASH>..<HASH> 100755
--- a/py/cmdline/pycleanup.py
+++ b/py/cmdline/pycleanup.py
@@ -11,11 +11,26 @@ import py
def main():
parser = py.compat.optparse.OptionParser(usage=__doc__)
+ parser.add_option("-e", "--remove", dest="ext", default=".pyc", action="store",
+ help="remove files with the given comma-separated list of extensions"
+ )
+ parser.add_option("-n", "--dryrun", dest="dryrun", default=False,
+ action="store_true",
+ help="display would-be-removed filenames"
+ )
(options, args) = parser.parse_args()
if not args:
args = ["."]
+ ext = options.ext.split(",")
+ def shouldremove(p):
+ return p.ext in ext
+
for arg in args:
path = py.path.local(arg)
- print "cleaning path", path
- for x in path.visit('*.pyc', lambda x: x.check(dotfile=0, link=0)):
- x.remove()
+ print "cleaning path", path, "of extensions", ext
+ for x in path.visit(shouldremove, lambda x: x.check(dotfile=0, link=0)):
+ if options.dryrun:
+ print "would remove", x
+ else:
+ print "removing", x
+ x.remove()
|
extend py.cleanup to supply a list of extensions to clean
|
py
|
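The same dry-run cleanup loop sketched in modern Python with pathlib instead of py.path (extensions and paths here are illustrative, not from the diff):

from pathlib import Path

def cleanup(root='.', exts=('.pyc', '.pyo'), dryrun=True):
    for path in Path(root).rglob('*'):
        if path.is_file() and path.suffix in exts:
            if dryrun:
                print('would remove', path)
            else:
                path.unlink()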
diff --git a/salt/client/mixins.py b/salt/client/mixins.py
index <HASH>..<HASH> 100644
--- a/salt/client/mixins.py
+++ b/salt/client/mixins.py
@@ -394,7 +394,8 @@ class SyncClientMixin(object):
with tornado.stack_context.StackContext(self.functions.context_dict.clone):
data['return'] = self.functions[fun](*args, **kwargs)
data['success'] = True
- if 'data' in data['return']:
+ if isinstance(data['return'], dict) and 'data' in data['return']:
+ # some functions can return boolean values
data['success'] = salt.utils.check_state_result(data['return']['data'])
except (Exception, SystemExit) as ex:
if isinstance(ex, salt.exceptions.NotImplemented):
|
Check that data['return'] is a dict before looking for 'data'
|
py
|
diff --git a/chatterbot/storage/sql_storage.py b/chatterbot/storage/sql_storage.py
index <HASH>..<HASH> 100644
--- a/chatterbot/storage/sql_storage.py
+++ b/chatterbot/storage/sql_storage.py
@@ -19,7 +19,6 @@ class SQLStorageAdapter(StorageAdapter):
def __init__(self, **kwargs):
super(SQLStorageAdapter, self).__init__(**kwargs)
- from re import search
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
@@ -35,7 +34,7 @@ class SQLStorageAdapter(StorageAdapter):
self.engine = create_engine(self.database_uri, convert_unicode=True)
- if search('^sqlite://', self.database_uri):
+ if self.database_uri.startswith('sqlite://'):
from sqlalchemy.engine import Engine
from sqlalchemy import event
|
Regex is overkill for this check
|
py
|
diff --git a/preventconcurrentlogins/models.py b/preventconcurrentlogins/models.py
index <HASH>..<HASH> 100644
--- a/preventconcurrentlogins/models.py
+++ b/preventconcurrentlogins/models.py
@@ -1,12 +1,9 @@
+from django.conf import settings
+from django.contrib.auth.models import User
from django.db import models
-try:
- from django.contrib.auth import get_user_model
-except ImportError: # django < 1.5
- from django.contrib.auth.models import User
-else:
- User = get_user_model()
+AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', User)
class Visitor(models.Model):
- user = models.OneToOneField(User, null=False, related_name='visitor')
- session_key = models.CharField(null=False, max_length=40)
\ No newline at end of file
+ user = models.OneToOneField(AUTH_USER_MODEL, null=False, related_name='visitor')
+ session_key = models.CharField(null=False, max_length=40)
|
Django <I>: Fix django.core.exceptions.AppRegistryNotReady: Models aren't loaded yet.
|
py
|
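On modern Django the fully lazy form of this pattern passes the settings.AUTH_USER_MODEL string (e.g. 'auth.User') straight to the relation, so the model is resolved only after the app registry is ready; a hedged sketch, not the package's actual code:

from django.conf import settings
from django.db import models

class Visitor(models.Model):
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,   # an "app_label.Model" string
        on_delete=models.CASCADE,   # required since Django 2.0
        related_name='visitor',
    )
    session_key = models.CharField(max_length=40)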
diff --git a/standalone_tests/test_git_workflows.py b/standalone_tests/test_git_workflows.py
index <HASH>..<HASH> 100755
--- a/standalone_tests/test_git_workflows.py
+++ b/standalone_tests/test_git_workflows.py
@@ -20,6 +20,8 @@ COMMITS_SHOULD_FAIL_ARG = 'tiny_max_file_size'
COMMITS_SHOULD_FAIL = COMMITS_SHOULD_FAIL_ARG in sys.argv
if COMMITS_SHOULD_FAIL:
sys.argv.remove(COMMITS_SHOULD_FAIL_ARG)
+ if 'phylesystem' not in config.sections():
+ config.add_section('phylesystem')
config.set('phylesystem', 'max_file_size', '10') # ten bytes is not large
_replace_default_config(config)
|
edit test to pass in the absence of a config file
|
py
|
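The guard matters because ConfigParser.set() raises NoSectionError when the section does not exist; a minimal self-contained illustration of the same pattern:

import configparser

config = configparser.ConfigParser()
if 'phylesystem' not in config.sections():
    config.add_section('phylesystem')
config.set('phylesystem', 'max_file_size', '10')  # now safe to set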
diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100755
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -5208,11 +5208,15 @@ def test_fist_of_jaraxxus():
def test_far_sight():
game = prepare_game()
game.player1.discard_hand()
- farsight = game.current_player.give("CS2_053")
+ farsight = game.player1.give("CS2_053")
farsight.play()
- assert len(game.current_player.hand) == 1
- assert game.current_player.hand[0].buffs
- assert game.current_player.hand[0].cost >= 0
+ assert len(game.player1.hand) == 1
+ card1 = game.player1.hand[0]
+
+ assert card1.buffs
+ assert card1.cost >= 0
+ card2 = game.player1.give(card1.id)
+ assert card1.cost == max(card2.cost - 3, 0)
def test_fatigue():
|
Change test for Far Sight to check for buff value
|
py
|
diff --git a/tests/test_hdf5.py b/tests/test_hdf5.py
index <HASH>..<HASH> 100644
--- a/tests/test_hdf5.py
+++ b/tests/test_hdf5.py
@@ -44,7 +44,7 @@ class TestH5PYDataset(object):
def setUp(self):
self.h5file = h5py.File(
- 'file.hdf5', mode="w", device='core', backing_store=False)
+ 'file.hdf5', mode='w', driver='core', backing_store=False)
self.h5file['features'] = self.features
self.h5file['features'].dims[0].label = 'batch'
self.h5file['features'].dims[1].label = 'feature'
|
Fix typo: h5py driver kwarg
|
py
|
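For reference, the corrected keyword in isolation: driver='core' keeps the HDF5 file in memory and backing_store=False discards it on close (h5py has no 'device' argument). A minimal sketch:

import h5py
import numpy as np

h5file = h5py.File('scratch.hdf5', mode='w', driver='core',
                   backing_store=False)
h5file['features'] = np.zeros((4, 3), dtype='float32')
h5file['features'].dims[0].label = 'batch'
h5file.close()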
diff --git a/openquake/calculators/scenario_damage.py b/openquake/calculators/scenario_damage.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/scenario_damage.py
+++ b/openquake/calculators/scenario_damage.py
@@ -88,7 +88,6 @@ def scenario_damage(riskinputs, param, monitor):
for ri in riskinputs:
# here instead F32 floats are ok
acc = [] # (aid, eid, lid, ds...)
- ri.hazard_getter.init()
for out in ri.gen_outputs(crmodel, monitor):
r = out.rlzi
ne = num_events[r] # total number of events
|
Removed unused line [skip CI]
|
py
|
diff --git a/matrix_client/api.py b/matrix_client/api.py
index <HASH>..<HASH> 100644
--- a/matrix_client/api.py
+++ b/matrix_client/api.py
@@ -265,6 +265,17 @@ class MatrixHttpApi(object):
"""
return self._send("GET", "/rooms/" + room_id + "/state/m.room.name")
+ def set_room_name(self, room_id, name):
+ """Perform PUT /rooms/$room_id/state/m.room.name
+ Args:
+ room_id(str): The room ID
+ name(str): The new room name
+ """
+ body = {
+ "name": name
+ }
+ return self._send("PUT", "/rooms/" + room_id + "/state/m.room.name", body)
+
def get_room_topic(self, room_id):
"""Perform GET /rooms/$room_id/state/m.room.topic
Args:
|
Add API for setting the room name: create set_room_name, which performs PUT /rooms/$room_id/state/m.room.name
|
py
|
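Hypothetical usage of the new helper (the homeserver URL, access token, and room ID below are placeholders, not from the diff):

from matrix_client.api import MatrixHttpApi

api = MatrixHttpApi('https://matrix.org', token='some_access_token')
api.set_room_name('!abc123:matrix.org', 'Release planning')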
diff --git a/angr/analyses/cfg/indirect_jump_resolvers/jumptable.py b/angr/analyses/cfg/indirect_jump_resolvers/jumptable.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/cfg/indirect_jump_resolvers/jumptable.py
+++ b/angr/analyses/cfg/indirect_jump_resolvers/jumptable.py
@@ -296,6 +296,10 @@ class JumpTableResolver(IndirectJumpResolver):
# It's not a jump table, but we resolve it anyway
jump_target_addr = load_stmt.data.addr.con.value
jump_target = cfg._fast_memory_load_pointer(jump_target_addr)
+ if not jump_target:
+                # ...except when this constant looks like a jump out of the binary!
+                l.info("Constant indirect jump directed out of the binary at %#08x" % addr)
+ return False, []
l.info("Resolved constant indirect jump from %#08x to %#08x" % (addr, jump_target_addr))
ij = cfg.indirect_jumps[addr]
ij.jumptable = False
|
Add check to the constant indirect jump resolver bits so that we make sure we can actually find the target (e.g., don't resolve a jumpout to nowhere)
|
py
|
diff --git a/rest_framework_gis/serializers.py b/rest_framework_gis/serializers.py
index <HASH>..<HASH> 100755
--- a/rest_framework_gis/serializers.py
+++ b/rest_framework_gis/serializers.py
@@ -13,7 +13,10 @@ except ImportError:
from .fields import GeometryField
# map drf-gis GeometryField to GeoDjango Geometry Field
-_geo_field_mapping = ModelSerializer._field_mapping.mapping
+try:
+ _geo_field_mapping = ModelSerializer._field_mapping.mapping
+except AttributeError:
+ _geo_field_mapping = ModelSerializer.serializer_field_mapping
_geo_field_mapping.update({
django_GeometryField: GeometryField
})
|
Stops breakage with drf <I>
|
py
|
diff --git a/MAVProxy/tools/MAVExplorer.py b/MAVProxy/tools/MAVExplorer.py
index <HASH>..<HASH> 100755
--- a/MAVProxy/tools/MAVExplorer.py
+++ b/MAVProxy/tools/MAVExplorer.py
@@ -825,6 +825,10 @@ def cmd_param(args):
set_vehicle_name()
mestate.param_help.param_check(mestate.mlog.params, args[1:])
return
+ if args[0] == 'show':
+ # habits from mavproxy
+ cmd_param(args[1:])
+ return
wildcard = args[0]
if len(args) > 1 and args[1] == "-v":
set_vehicle_name()
|
cope with the mavproxy habit of typing 'param show'
|
py
|
diff --git a/test_flask_alchy.py b/test_flask_alchy.py
index <HASH>..<HASH> 100644
--- a/test_flask_alchy.py
+++ b/test_flask_alchy.py
@@ -1,7 +1,4 @@
-# remove once alchy released to pypi
-import sys; sys.path.append('../alchy')
-
from unittest import TestCase
from flask import Flask
|
Remove temp code needed to test prior to alchy release.
|
py
|
diff --git a/insights/parsers/uname.py b/insights/parsers/uname.py
index <HASH>..<HASH> 100644
--- a/insights/parsers/uname.py
+++ b/insights/parsers/uname.py
@@ -113,6 +113,7 @@ rhel_release_map = {
"4.18.0-240": "8.3",
"4.18.0-305": "8.4",
"4.18.0-348": "8.5",
+ "4.18.0-372": "8.6",
}
release_to_kernel_map = dict((v, k) for k, v in rhel_release_map.items())
|
feat: RHEL <I> is GA (#<I>)
|
py
|
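The inversion idiom behind release_to_kernel_map, shown in isolation (a dict comprehension is the modern spelling of the same dict((v, k) ...) construct):

rhel_release_map = {"4.18.0-372": "8.6"}
release_to_kernel_map = {v: k for k, v in rhel_release_map.items()}
assert release_to_kernel_map["8.6"] == "4.18.0-372"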
diff --git a/horizon/loaders.py b/horizon/loaders.py
index <HASH>..<HASH> 100644
--- a/horizon/loaders.py
+++ b/horizon/loaders.py
@@ -19,7 +19,10 @@ import os
import django
from django.conf import settings
-from django.template.base import TemplateDoesNotExist # noqa
+if django.VERSION >= (1, 9):
+ from django.template.exceptions import TemplateDoesNotExist
+else:
+ from django.template.base import TemplateDoesNotExist # noqa
if django.get_version() >= '1.8':
from django.template.engine import Engine
|
[Django <I>] Do not use TemplateDoesNotExist from Django. TemplateDoesNotExist was removed in Django <I>. This is a trivial class which exists only to raise an exception, and which can easily be redefined in our code. Change-Id: I<I>d7e6e<I>f<I>fd2b<I>b<I> Closes-Bug: #<I>
|
py
|
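An alternative spelling of the same shim probes the import itself rather than the version tuple, a common choice when only a symbol's location moved; a sketch, not Horizon's code:

try:
    from django.template.exceptions import TemplateDoesNotExist  # Django >= 1.9
except ImportError:
    from django.template.base import TemplateDoesNotExist  # older Django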
diff --git a/zinnia/__init__.py b/zinnia/__init__.py
index <HASH>..<HASH> 100644
--- a/zinnia/__init__.py
+++ b/zinnia/__init__.py
@@ -1,5 +1,5 @@
"""Zinnia"""
-__version__ = '0.6'
+__version__ = '0.7'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
|
releasing zinnia <I>
|
py
|
diff --git a/b2handle/handleexceptions.py b/b2handle/handleexceptions.py
index <HASH>..<HASH> 100644
--- a/b2handle/handleexceptions.py
+++ b/b2handle/handleexceptions.py
@@ -30,7 +30,7 @@ class IllegalOperationException(Exception):
when he wants to create or remove 10320/loc entries using the
wrong method, ...
'''
- def __init__(self, operation=None, handle=None, custom_message=None): # TODO ORDER CHANGED! bef line 605
+ def __init__(self, operation=None, handle=None, custom_message=None):
self.msg = "Illegal Operation"
self.handle = handle
self.custom_message = custom_message
@@ -196,6 +196,8 @@ class HandleAuthentificationError(Exception):
super(self.__class__, self).__init__(self.msg)
class CredentialsFormatError(Exception):
+ '''
+ To be raised if credentials are ill-formatted or miss essential items.'''
def __init__(self, custom_message=None):
self.msg = 'Ill-formatted credentials'
self.custom_message = custom_message
|
Adapted docstring/comments of some exceptions.
|
py
|