\"\n return HTML(data=res)"},"code_compressed":{"kind":"null"}}},{"rowIdx":1494,"cells":{"id":{"kind":"number","value":1494,"string":"1,494"},"code":{"kind":"string","value":"import os\nimport threading\nimport time\nfrom typing import Optional\n\nimport psutil\n\nfrom galaxy import (\n job_metrics,\n model,\n)\nfrom galaxy.app_unittest_utils.tools_support import UsesTools\nfrom galaxy.jobs.runners import local\nfrom galaxy.util import bunch\nfrom galaxy.util.unittest import TestCase\n\n\nclass TestLocalJobRunner(TestCase, UsesTools):\n def setUp(self):\n self.setup_app()\n self._init_tool()\n self.app.job_metrics = job_metrics.JobMetrics()\n self.job_wrapper = MockJobWrapper(self.app, self.test_directory, self.tool)\n\n def tearDown(self):\n self.tear_down_app()\n\n def test_run(self):\n self.job_wrapper.command_line = \"echo HelloWorld\"\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert self.job_wrapper.stdout.strip() == \"HelloWorld\"\n\n def test_galaxy_lib_on_path(self):\n self.job_wrapper.command_line = '''python -c \"import galaxy.util\"'''\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert self.job_wrapper.exit_code == 0\n\n def test_default_slots(self):\n self.job_wrapper.command_line = \"\"\"echo $GALAXY_SLOTS\"\"\"\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert self.job_wrapper.stdout.strip() == \"1\"\n\n def test_slots_override(self):\n # Set local_slots in job destination to specify slots for\n # local job runner.\n self.job_wrapper.job_destination.params[\"local_slots\"] = 3\n self.job_wrapper.command_line = \"\"\"echo $GALAXY_SLOTS\"\"\"\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert self.job_wrapper.stdout.strip() == \"3\"\n\n def test_exit_code(self):\n self.job_wrapper.command_line = '''sh -c \"exit 4\"'''\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert self.job_wrapper.exit_code == 4\n\n def test_metadata_gets_set(self):\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert os.path.exists(self.job_wrapper.mock_metadata_path)\n\n def test_metadata_gets_set_if_embedded(self):\n self.job_wrapper.job_destination.params[\"embed_metadata_in_job\"] = \"True\"\n\n # Kill off cruft for _handle_metadata_externally and make sure job still works...\n self.job_wrapper.external_output_metadata = None\n self.app.datatypes_registry.set_external_metadata_tool = None\n\n runner = local.LocalJobRunner(self.app, 1)\n runner.queue_job(self.job_wrapper)\n assert os.path.exists(self.job_wrapper.mock_metadata_path)\n\n def test_stopping_job(self):\n self.job_wrapper.command_line = '''python -c \"import time; time.sleep(15)\"'''\n runner = local.LocalJobRunner(self.app, 1)\n\n def queue():\n runner.queue_job(self.job_wrapper)\n\n t = threading.Thread(target=queue)\n t.start()\n external_id = self.job_wrapper.wait_for_external_id()\n assert psutil.pid_exists(external_id)\n runner.stop_job(self.job_wrapper)\n t.join(1)\n assert not psutil.pid_exists(external_id)\n\n def test_shutdown_no_jobs(self):\n self.app.config.monitor_thread_join_timeout = 5\n runner = local.LocalJobRunner(self.app, 1)\n runner.start()\n runner.shutdown()\n\n def test_stopping_job_at_shutdown(self):\n self.job_wrapper.command_line = '''python -c \"import time; time.sleep(15)\"'''\n self.app.model.session = bunch.Bunch(add=lambda x: None, flush=lambda: None)\n runner = 
local.LocalJobRunner(self.app, 1)\n runner.start()\n self.app.config.monitor_thread_join_timeout = 15\n\n def queue():\n runner.queue_job(self.job_wrapper)\n\n t = threading.Thread(target=queue)\n t.start()\n external_id = self.job_wrapper.wait_for_external_id()\n assert psutil.pid_exists(external_id)\n runner.shutdown()\n t.join(1)\n assert not psutil.pid_exists(external_id)\n assert \"job terminated by Galaxy shutdown\" in self.job_wrapper.fail_message\n\n\nclass MockJobWrapper:\n def __init__(self, app, test_directory, tool):\n working_directory = os.path.join(test_directory, \"workdir\")\n tool_working_directory = os.path.join(working_directory, \"working\")\n os.makedirs(tool_working_directory)\n self.app = app\n self.tool = tool\n self.requires_containerization = False\n self.state = model.Job.states.QUEUED\n self.command_line = \"echo HelloWorld\"\n self.environment_variables = []\n self.commands_in_new_shell = False\n self.prepare_called = False\n self.dependency_shell_commands = None\n self.working_directory = working_directory\n self.tool_working_directory = tool_working_directory\n self.requires_setting_metadata = True\n self.job_destination = bunch.Bunch(id=\"default\", params={})\n self.galaxy_lib_dir = os.path.abspath(\"lib\")\n self.job = model.Job()\n self.job_id = 1\n self.job.id = 1\n self.output_paths = [\"/tmp/output1.dat\"]\n self.mock_metadata_path = os.path.abspath(os.path.join(test_directory, \"METADATA_SET\"))\n self.metadata_command = f\"touch {self.mock_metadata_path}\"\n self.galaxy_virtual_env = None\n self.shell = \"/bin/bash\"\n self.cleanup_job = \"never\"\n self.tmp_dir_creation_statement = \"\"\n self.use_metadata_binary = False\n self.guest_ports = []\n self.metadata_strategy = \"directory\"\n self.remote_command_line = False\n\n # Cruft for setting metadata externally, axe at some point.\n self.external_output_metadata: Optional[bunch.Bunch] = bunch.Bunch()\n self.app.datatypes_registry.set_external_metadata_tool = bunch.Bunch(build_dependency_shell_commands=lambda: [])\n\n def check_tool_output(*args, **kwds):\n return \"ok\"\n\n def wait_for_external_id(self):\n \"\"\"Test method for waiting until an external id has been registered.\"\"\"\n external_id = None\n for _ in range(50):\n external_id = self.job.job_runner_external_id\n if external_id:\n break\n time.sleep(0.1)\n return external_id\n\n def prepare(self):\n self.prepare_called = True\n\n def set_external_id(self, external_id, **kwd):\n self.job.job_runner_external_id = external_id\n\n def get_command_line(self):\n return self.command_line\n\n def container_monitor_command(self, *args, **kwds):\n return None\n\n def get_id_tag(self):\n return \"1\"\n\n def get_state(self):\n return self.state\n\n def change_state(self, state, job=None):\n self.state = state\n\n @property\n def job_io(self):\n return bunch.Bunch(\n get_output_fnames=lambda: [], check_job_script_integrity=False, version_path=\"/tmp/version_path\"\n )\n\n def METHOD_NAME(self):\n return self.job\n\n def setup_external_metadata(self, **kwds):\n return self.metadata_command\n\n def get_env_setup_clause(self):\n return \"\"\n\n def has_limits(self):\n return False\n\n def fail(\n self, message, exception=False, tool_stdout=\"\", tool_stderr=\"\", exit_code=None, job_stdout=None, job_stderr=None\n ):\n self.fail_message = message\n self.fail_exception = exception\n\n def finish(self, stdout, stderr, exit_code, **kwds):\n self.stdout = stdout\n self.stderr = stderr\n self.exit_code = exit_code\n\n def tmp_directory(self):\n return 
None\n\n def home_directory(self):\n return None\n\n def reclaim_ownership(self):\n pass\n\n @property\n def is_cwl_job(self):\n return False"},"code_compressed":{"kind":"null"}}},{"rowIdx":1495,"cells":{"id":{"kind":"number","value":1495,"string":"1,495"},"code":{"kind":"string","value":"# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\nfrom aliyunsdkcore.request import RpcRequest\nfrom aliyunsdkconfig.endpoint import endpoint_data\n\nclass CreateAggregateConfigDeliveryChannelRequest(RpcRequest):\n\n\tdef __init__(self):\n\t\tRpcRequest.__init__(self, 'Config', '2020-09-07', 'CreateAggregateConfigDeliveryChannel')\n\t\tself.set_method('POST')\n\n\t\tif hasattr(self, \"endpoint_map\"):\n\t\t\tsetattr(self, \"endpoint_map\", endpoint_data.getEndpointMap())\n\t\tif hasattr(self, \"endpoint_regional\"):\n\t\t\tsetattr(self, \"endpoint_regional\", endpoint_data.getEndpointRegional())\n\n\tdef get_NonCompliantNotification(self): # Boolean\n\t\treturn self.get_query_params().get('NonCompliantNotification')\n\n\tdef set_NonCompliantNotification(self, NonCompliantNotification): # Boolean\n\t\tself.add_query_param('NonCompliantNotification', NonCompliantNotification)\n\tdef get_ClientToken(self): # String\n\t\treturn self.get_query_params().get('ClientToken')\n\n\tdef set_ClientToken(self, ClientToken): # String\n\t\tself.add_query_param('ClientToken', ClientToken)\n\tdef get_ConfigurationSnapshot(self): # Boolean\n\t\treturn self.get_query_params().get('ConfigurationSnapshot')\n\n\tdef set_ConfigurationSnapshot(self, ConfigurationSnapshot): # Boolean\n\t\tself.add_query_param('ConfigurationSnapshot', ConfigurationSnapshot)\n\tdef get_Description(self): # String\n\t\treturn self.get_query_params().get('Description')\n\n\tdef set_Description(self, Description): # String\n\t\tself.add_query_param('Description', Description)\n\tdef get_AggregatorId(self): # String\n\t\treturn self.get_query_params().get('AggregatorId')\n\n\tdef set_AggregatorId(self, AggregatorId): # String\n\t\tself.add_query_param('AggregatorId', AggregatorId)\n\tdef get_DeliveryChannelTargetArn(self): # String\n\t\treturn self.get_query_params().get('DeliveryChannelTargetArn')\n\n\tdef set_DeliveryChannelTargetArn(self, DeliveryChannelTargetArn): # String\n\t\tself.add_query_param('DeliveryChannelTargetArn', DeliveryChannelTargetArn)\n\tdef get_DeliveryChannelCondition(self): # String\n\t\treturn self.get_query_params().get('DeliveryChannelCondition')\n\n\tdef set_DeliveryChannelCondition(self, DeliveryChannelCondition): # String\n\t\tself.add_query_param('DeliveryChannelCondition', DeliveryChannelCondition)\n\tdef get_ConfigurationItemChangeNotification(self): # Boolean\n\t\treturn self.get_query_params().get('ConfigurationItemChangeNotification')\n\n\tdef 
set_ConfigurationItemChangeNotification(self, ConfigurationItemChangeNotification): # Boolean\n\t\tself.add_query_param('ConfigurationItemChangeNotification', ConfigurationItemChangeNotification)\n\tdef get_DeliveryChannelName(self): # String\n\t\treturn self.get_query_params().get('DeliveryChannelName')\n\n\tdef set_DeliveryChannelName(self, DeliveryChannelName): # String\n\t\tself.add_query_param('DeliveryChannelName', DeliveryChannelName)\n\tdef METHOD_NAME(self): # String\n\t\treturn self.get_query_params().get('DeliverySnapshotTime')\n\n\tdef set_DeliverySnapshotTime(self, DeliverySnapshotTime): # String\n\t\tself.add_query_param('DeliverySnapshotTime', DeliverySnapshotTime)\n\tdef get_OversizedDataOSSTargetArn(self): # String\n\t\treturn self.get_query_params().get('OversizedDataOSSTargetArn')\n\n\tdef set_OversizedDataOSSTargetArn(self, OversizedDataOSSTargetArn): # String\n\t\tself.add_query_param('OversizedDataOSSTargetArn', OversizedDataOSSTargetArn)\n\tdef get_DeliveryChannelType(self): # String\n\t\treturn self.get_query_params().get('DeliveryChannelType')\n\n\tdef set_DeliveryChannelType(self, DeliveryChannelType): # String\n\t\tself.add_query_param('DeliveryChannelType', DeliveryChannelType)"},"code_compressed":{"kind":"null"}}},{"rowIdx":1496,"cells":{"id":{"kind":"number","value":1496,"string":"1,496"},"code":{"kind":"string","value":"# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. 
See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\nfrom aliyunsdkcore.request import RpcRequest\nfrom aliyunsdkimm.endpoint import endpoint_data\nimport json\n\nclass GenerateWebofficeTokenRequest(RpcRequest):\n\n\tdef __init__(self):\n\t\tRpcRequest.__init__(self, 'imm', '2020-09-30', 'GenerateWebofficeToken','imm')\n\t\tself.set_method('POST')\n\n\t\tif hasattr(self, \"endpoint_map\"):\n\t\t\tsetattr(self, \"endpoint_map\", endpoint_data.getEndpointMap())\n\t\tif hasattr(self, \"endpoint_regional\"):\n\t\t\tsetattr(self, \"endpoint_regional\", endpoint_data.getEndpointRegional())\n\n\tdef get_Referer(self): # String\n\t\treturn self.get_query_params().get('Referer')\n\n\tdef set_Referer(self, Referer): # String\n\t\tself.add_query_param('Referer', Referer)\n\tdef get_Notification(self): # Struct\n\t\treturn self.get_query_params().get('Notification')\n\n\tdef set_Notification(self, Notification): # Struct\n\t\tself.add_query_param(\"Notification\", json.dumps(Notification))\n\tdef get_Password(self): # String\n\t\treturn self.get_query_params().get('Password')\n\n\tdef set_Password(self, Password): # String\n\t\tself.add_query_param('Password', Password)\n\tdef get_ProjectName(self): # String\n\t\treturn self.get_query_params().get('ProjectName')\n\n\tdef set_ProjectName(self, ProjectName): # String\n\t\tself.add_query_param('ProjectName', ProjectName)\n\tdef get_Watermark(self): # Struct\n\t\treturn self.get_query_params().get('Watermark')\n\n\tdef set_Watermark(self, Watermark): # Struct\n\t\tself.add_query_param(\"Watermark\", json.dumps(Watermark))\n\tdef get_NotifyTopicName(self): # String\n\t\treturn self.get_query_params().get('NotifyTopicName')\n\n\tdef METHOD_NAME(self, NotifyTopicName): # String\n\t\tself.add_query_param('NotifyTopicName', NotifyTopicName)\n\tdef get_Filename(self): # String\n\t\treturn self.get_query_params().get('Filename')\n\n\tdef set_Filename(self, Filename): # String\n\t\tself.add_query_param('Filename', Filename)\n\tdef get_SourceURI(self): # String\n\t\treturn self.get_query_params().get('SourceURI')\n\n\tdef set_SourceURI(self, SourceURI): # String\n\t\tself.add_query_param('SourceURI', SourceURI)\n\tdef get_ExternalUploaded(self): # Boolean\n\t\treturn self.get_query_params().get('ExternalUploaded')\n\n\tdef set_ExternalUploaded(self, ExternalUploaded): # Boolean\n\t\tself.add_query_param('ExternalUploaded', ExternalUploaded)\n\tdef get_UserData(self): # String\n\t\treturn self.get_query_params().get('UserData')\n\n\tdef set_UserData(self, UserData): # String\n\t\tself.add_query_param('UserData', UserData)\n\tdef get_PreviewPages(self): # Long\n\t\treturn self.get_query_params().get('PreviewPages')\n\n\tdef set_PreviewPages(self, PreviewPages): # Long\n\t\tself.add_query_param('PreviewPages', PreviewPages)\n\tdef get_Hidecmb(self): # Boolean\n\t\treturn self.get_query_params().get('Hidecmb')\n\n\tdef set_Hidecmb(self, Hidecmb): # Boolean\n\t\tself.add_query_param('Hidecmb', Hidecmb)\n\tdef get_CachePreview(self): # Boolean\n\t\treturn self.get_query_params().get('CachePreview')\n\n\tdef set_CachePreview(self, CachePreview): # Boolean\n\t\tself.add_query_param('CachePreview', CachePreview)\n\tdef get_Permission(self): # Struct\n\t\treturn self.get_query_params().get('Permission')\n\n\tdef set_Permission(self, Permission): # Struct\n\t\tself.add_query_param(\"Permission\", json.dumps(Permission))\n\tdef get_CredentialConfig(self): # Struct\n\t\treturn 
self.get_query_params().get('CredentialConfig')\n\n\tdef set_CredentialConfig(self, CredentialConfig): # Struct\n\t\tself.add_query_param(\"CredentialConfig\", json.dumps(CredentialConfig))\n\tdef get_User(self): # Struct\n\t\treturn self.get_query_params().get('User')\n\n\tdef set_User(self, User): # Struct\n\t\tself.add_query_param(\"User\", json.dumps(User))"},"code_compressed":{"kind":"null"}}},{"rowIdx":1497,"cells":{"id":{"kind":"number","value":1497,"string":"1,497"},"code":{"kind":"string","value":"import os\nimport subprocess\nimport pysam\n\nfrom TestUtils import force_str\n\n\ndef build_pileup_with_samtoolsshell(fn):\n os.system(\"samtools mpileup {} 2> /dev/null | wc -l > /dev/null\".format(fn))\n return 2998\n\n\ndef build_pileup_with_samtoolspipe(fn):\n FNULL = open(os.devnull, 'w')\n with subprocess.Popen([\"samtools\", \"mpileup\", fn],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=FNULL) as proc:\n return len(proc.stdout.readlines())\n\n \ndef build_pileup_with_pysam(*args, **kwargs):\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n return len(list(inf.pileup(stepper=\"samtools\")))\n\n\ndef build_depth_with_samtoolsshell(fn):\n os.system(\n \"samtools mpileup {} 2> /dev/null | awk '{{a += $4}} END {{print a}}' > /dev/null\".format(fn))\n return 107241\n\n\ndef build_depth_with_samtoolspipe(fn):\n FNULL = open(os.devnull, 'w')\n with subprocess.Popen([\"samtools\", \"mpileup\", fn],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=FNULL) as proc:\n data = [x.split() for x in proc.stdout.readlines()]\n return [int(x[3]) for x in data]\n\n\ndef build_depth_with_filter_with_pysam(*args, **kwargs):\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n return [x.get_num_aligned() for x in inf.pileup(stepper=\"samtools\")]\n\n\ndef build_depth_with_pysam(*args, **kwargs):\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n return [x.nsegments for x in inf.pileup(stepper=\"samtools\")]\n\n\ndef build_query_bases_with_samtoolsshell(fn):\n os.system(\"samtools mpileup {} 2> /dev/null | awk '{{a = a $5}} END {{print a}}' | wc -c > /dev/null\".format(fn))\n return 116308\n\n\ndef build_query_bases_with_samtoolspipe(fn, *args, **kwargs):\n FNULL = open(os.devnull, 'w')\n with subprocess.Popen([\"samtools\", \"mpileup\", fn] + list(args),\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=FNULL) as proc:\n stdout = proc.stdout.read().decode()\n return [x.split()[4] for x in stdout.splitlines()]\n\n\ndef build_query_bases_with_samtoolspysam(fn, *args):\n return [x.split()[4] for x in pysam.samtools.mpileup(fn, *args).splitlines()]\n \n\ndef build_query_bases_with_pysam_pileups(*args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n total_pileup = [\n [r.alignment.query_sequence[r.query_position_or_next]\n for r in column.pileups if r.query_position_or_next is not None]\n for column in inf.pileup(stepper=\"samtools\")]\n return total_pileup\n\n\ndef METHOD_NAME(*args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n total_pileup = [\n [r.alignment.query_qualities[r.query_position_or_next]\n for r in column.pileups if r.query_position_or_next is not None]\n for column in inf.pileup(stepper=\"samtools\")]\n return total_pileup\n\n\ndef build_query_bases_with_pysam(fn, *args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(fn) as inf:\n total_pileup = [column.get_query_sequences(\n mark_ends=True, add_indels=True, mark_matches=True) for column in\n inf.pileup(*args, 
**kwargs)]\n return total_pileup\n\n\ndef build_query_names_with_pysam(*args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n total_pileup = [column.get_query_names() for column in\n inf.pileup(stepper=\"samtools\")]\n return total_pileup\n\n\ndef build_query_qualities_with_samtoolspipe(fn):\n FNULL = open(os.devnull, 'w')\n with subprocess.Popen([\"samtools\", \"mpileup\", fn],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=FNULL) as proc:\n data = [force_str(x).split()[5] for x in proc.stdout.readlines()]\n return data\n\n\ndef build_query_qualities_with_pysam(*args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n total_pileup = [column.get_query_qualities() for column in\n inf.pileup(stepper=\"samtools\")]\n return total_pileup\n\n\ndef build_mapping_qualities_with_samtoolspipe(fn):\n FNULL = open(os.devnull, 'w')\n with subprocess.Popen([\"samtools\", \"mpileup\", \"-s\", fn],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=FNULL) as proc:\n data = [force_str(x).split()[6] for x in proc.stdout.readlines()]\n return data\n\n\ndef build_mapping_qualities_with_pysam(*args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n total_pileup = [column.get_mapping_qualities() for column in\n inf.pileup(stepper=\"samtools\")]\n return total_pileup\n\n\ndef build_query_positions_with_samtoolspipe(fn):\n FNULL = open(os.devnull, 'w')\n with subprocess.Popen([\"samtools\", \"mpileup\", \"-O\", fn],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=FNULL) as proc:\n data = [list(map(int, force_str(x).split()[6].split(\",\")))\n for x in proc.stdout.readlines()]\n return data\n\n\ndef build_query_positions_with_pysam(*args, **kwargs):\n total_pileup = []\n with pysam.AlignmentFile(*args, **kwargs) as inf:\n total_pileup = [column.get_query_positions() for column in\n inf.pileup(stepper=\"samtools\")]\n return total_pileup"},"code_compressed":{"kind":"null"}}},{"rowIdx":1498,"cells":{"id":{"kind":"number","value":1498,"string":"1,498"},"code":{"kind":"string","value":"# --------------------------------------------------------------\n# Index position functions\n# --------------------------------------------------------------\n\n\ndef indexPosition1D(i, N):\n \"\"\"This function is a generic function which determines if index\n over a list of length N is an interior point or node 0 or node 1.\n \"\"\"\n if 0 < i < N - 1: # Interior\n return 0, None\n elif i == 0: # Node 0\n return 1, 0\n elif i == N - 1: # Node 1\n return 1, 1\n\n\ndef indexPosition2D(i, j, N, M):\n \"\"\"This function is a generic function which determines if for a grid\n of data NxM with index i going 0->N-1 and j going 0->M-1, it\n determines if i,j is on the interior, on an edge or on a corner\n\n The function return four values:\n type: this is 0 for interior, 1 for on an edge and 2 for on a corner\n edge: this is the edge number if type==1\n node: this is the node number if type==2\n index: this is the value index along the edge of interest --\n only defined for edges\"\"\"\n\n if 0 < i < N - 1 and 0 < j < M - 1: # Interior\n return 0, None, None, None\n elif 0 < i < N - 1 and j == 0: # Edge 0\n return 1, 0, None, i\n elif 0 < i < N - 1 and j == M - 1: # Edge 1\n return 1, 1, None, i\n elif i == 0 and 0 < j < M - 1: # Edge 2\n return 1, 2, None, j\n elif i == N - 1 and 0 < j < M - 1: # Edge 3\n return 1, 3, None, j\n elif i == 0 and j == 0: # Node 0\n return 2, None, 0, None\n elif i == N - 
1 and j == 0: # Node 1\n return 2, None, 1, None\n elif i == 0 and j == M - 1: # Node 2\n return 2, None, 2, None\n elif i == N - 1 and j == M - 1: # Node 3\n return 2, None, 3, None\n\n\ndef METHOD_NAME(i, j, k, N, M, L):\n \"\"\"This function is a generic function which determines if for a\n 3D grid of data NxMXL with index i going 0->N-1 and j going 0->M-1\n k going 0->L-1, it determines if i,j,k is on the interior, on a\n face, on an edge or on a corner\n\n Returns\n -------\n type : int\n this is 0 for interior, 1 for on an face, 3 for an edge and 4 for on a corner\n number : int\n this is the face number if type==1,\n this is the edge number if type==2,\n this is the node number if type==3\n\n index1 : int\n this is the value index along 0th dir the face of interest OR edge of interest\n index2 : int\n this is the value index along 1st dir the face of interest\n \"\"\"\n\n # Note to interior->Faces->Edges->Nodes to minimize number of if checks\n\n # Interior:\n if 0 < i < N - 1 and 0 < j < M - 1 and 0 < k < L - 1:\n return 0, None, None, None\n\n elif 0 < i < N - 1 and 0 < j < M - 1 and k == 0: # Face 0\n return 1, 0, i, j\n elif 0 < i < N - 1 and 0 < j < M - 1 and k == L - 1: # Face 1\n return 1, 1, i, j\n elif i == 0 and 0 < j < M - 1 and 0 < k < L - 1: # Face 2\n return 1, 2, j, k\n elif i == N - 1 and 0 < j < M - 1 and 0 < k < L - 1: # Face 3\n return 1, 3, j, k\n elif 0 < i < N - 1 and j == 0 and 0 < k < L - 1: # Face 4\n return 1, 4, i, k\n elif 0 < i < N - 1 and j == M - 1 and 0 < k < L - 1: # Face 5\n return 1, 5, i, k\n\n elif 0 < i < N - 1 and j == 0 and k == 0: # Edge 0\n return 2, 0, i, None\n elif 0 < i < N - 1 and j == M - 1 and k == 0: # Edge 1\n return 2, 1, i, None\n elif i == 0 and 0 < j < M - 1 and k == 0: # Edge 2\n return 2, 2, j, None\n elif i == N - 1 and 0 < j < M - 1 and k == 0: # Edge 3\n return 2, 3, j, None\n elif 0 < i < N - 1 and j == 0 and k == L - 1: # Edge 4\n return 2, 4, i, None\n elif 0 < i < N - 1 and j == M - 1 and k == L - 1: # Edge 5\n return 2, 5, i, None\n elif i == 0 and 0 < j < M - 1 and k == L - 1: # Edge 6\n return 2, 6, j, None\n elif i == N - 1 and 0 < j < M - 1 and k == L - 1: # Edge 7\n return 2, 7, j, None\n elif i == 0 and j == 0 and 0 < k < L - 1: # Edge 8\n return 2, 8, k, None\n elif i == N - 1 and j == 0 and 0 < k < L - 1: # Edge 9\n return 2, 9, k, None\n elif i == 0 and j == M - 1 and 0 < k < L - 1: # Edge 10\n return 2, 10, k, None\n elif i == N - 1 and j == M - 1 and 0 < k < L - 1: # Edge 11\n return 2, 11, k, None\n\n elif i == 0 and j == 0 and k == 0: # Node 0\n return 3, 0, None, None\n elif i == N - 1 and j == 0 and k == 0: # Node 1\n return 3, 1, None, None\n elif i == 0 and j == M - 1 and k == 0: # Node 2\n return 3, 2, None, None\n elif i == N - 1 and j == M - 1 and k == 0: # Node 3\n return 3, 3, None, None\n elif i == 0 and j == 0 and k == L - 1: # Node 4\n return 3, 4, None, None\n elif i == N - 1 and j == 0 and k == L - 1: # Node 5\n return 3, 5, None, None\n elif i == 0 and j == M - 1 and k == L - 1: # Node 6\n return 3, 6, None, None\n elif i == N - 1 and j == M - 1 and k == L - 1: # Node 7\n return 3, 7, None, None"},"code_compressed":{"kind":"null"}}},{"rowIdx":1499,"cells":{"id":{"kind":"number","value":1499,"string":"1,499"},"code":{"kind":"string","value":"'''Adapted from https://github.com/lucidrains/local-attention.'''\n\nimport math\nfrom typing import Optional\n\nimport torch\nfrom torch import nn, einsum\nimport torch.nn.functional as F\n\nfrom einops import rearrange, repeat, pack, unpack\n\nfrom 
archai.discrete_search.search_spaces.config import ArchConfig\n\nTOKEN_SELF_ATTN_VALUE = -5e4\n\n\nclass SinusoidalEmbeddings(nn.Module):\n def __init__(self, dim):\n super().__init__()\n inv_freq = 1. / (10000 ** (torch.arange(0, dim, 2).float() / dim))\n self.register_buffer('inv_freq', inv_freq)\n\n def forward(self, x):\n n = x.shape[-2]\n t = torch.arange(n, device = x.device).type_as(self.inv_freq)\n freqs = torch.einsum('i , j -> i j', t, self.inv_freq)\n return torch.cat((freqs, freqs), dim=-1)\n\ndef rotate_half(x):\n x = rearrange(x, 'b ... (r d) -> b (...) r d', r = 2)\n x1, x2 = x.unbind(dim = -2)\n return torch.cat((-x2, x1), dim = -1)\n\ndef apply_rotary_pos_emb(q, k, freqs):\n q, k = map(lambda t: (t * freqs.cos()) + (rotate_half(t) * freqs.sin()), (q, k))\n return q, k\n\ndef max_neg_value(tensor):\n return -torch.finfo(tensor.dtype).max\n\ndef pad_to_multiple(tensor, multiple, dim=-1, value=0):\n seqlen = tensor.shape[dim]\n m = seqlen / multiple\n if m.is_integer():\n return False, tensor\n remainder = math.ceil(m) * multiple - seqlen\n pad_offset = (0,) * (-1 - dim) * 2\n return True, F.pad(tensor, (*pad_offset, 0, remainder), value = value)\n\ndef METHOD_NAME(x, backward = 1, forward = 0, pad_value = -1, dim = 2):\n t = x.shape[1]\n dims = (len(x.shape) - dim) * (0, 0)\n padded_x = F.pad(x, (*dims, backward, forward), value = pad_value)\n tensors = [padded_x[:, ind:(ind + t), ...] for ind in range(forward + backward + 1)]\n return torch.cat(tensors, dim = dim)\n\n\nclass LocalAttention(nn.Module):\n def __init__(\n self,\n window_size,\n causal = False,\n look_backward = 1,\n look_forward = None,\n dropout = 0.,\n autopad = False,\n exact_windowsize = False,\n pad_value: int = -1,\n rel_pos_emb_dim: Optional[int] = None,\n **kwargs\n ):\n super().__init__()\n look_forward = look_forward or (0 if causal else 1)\n assert not (causal and look_forward > 0)\n\n self.window_size = window_size\n self.autopad = autopad\n self.exact_windowsize = exact_windowsize\n\n self.causal = causal\n\n self.look_backward = look_backward\n self.look_forward = look_forward\n self.pad_value = pad_value\n\n self.dropout = nn.Dropout(dropout)\n \n self.rel_pos = None\n \n if rel_pos_emb_dim is not None: # backwards compatible with old `rel_pos_emb_config` deprecated argument\n self.rel_pos = SinusoidalEmbeddings(rel_pos_emb_dim)\n\n def forward(self, q, k, v, bin_attention_mask: Optional[torch.FloatTensor] = None):\n # https://github.com/arogozhnikov/einops/blob/master/docs/4-pack-and-unpack.ipynb\n (q, packed_shape), (k, _), (v, _) = map(lambda t: pack([t], '* n d'), (q, k, v))\n\n if self.rel_pos is not None:\n pos_emb = self.rel_pos(q)\n q, k = apply_rotary_pos_emb(q, k, pos_emb)\n\n # auto padding\n if self.autopad:\n orig_seq_len = q.shape[1]\n (needed_pad, q), (_, k), (_, v) = map(lambda t: pad_to_multiple(t, self.window_size, dim = -2), (q, k, v))\n\n b, n, dim_head, device, dtype = *q.shape, q.device, q.dtype\n scale = dim_head ** -0.5\n\n assert (n % self.window_size) == 0, f'sequence length {n} must be divisible by window size {self.window_size} for local attention'\n\n windows = n // self.window_size\n\n seq = torch.arange(n, device = device)\n b_t = rearrange(seq, '(w n) -> 1 w n', w = windows, n = self.window_size)\n\n bq, bk, bv = map(lambda t: rearrange(t, 'b (w n) d -> b w n d', w = windows), (q, k, v))\n\n look_around_kwargs = dict(\n backward = self.look_backward,\n forward = self.look_forward,\n pad_value = self.pad_value\n )\n\n bk = METHOD_NAME(bk, **look_around_kwargs)\n bv 
= METHOD_NAME(bv, **look_around_kwargs)\n\n bq_t = b_t\n bq_k = METHOD_NAME(b_t, **look_around_kwargs)\n\n bq_t = rearrange(bq_t, '... i -> ... i 1')\n bq_k = rearrange(bq_k, '... j -> ... 1 j')\n\n sim = einsum('b h i e, b h j e -> b h i j', bq, bk) * scale\n\n mask_value = max_neg_value(sim)\n\n if self.causal:\n causal_mask = bq_t < bq_k\n\n if self.exact_windowsize:\n max_causal_window_size = (self.window_size * self.look_backward)\n causal_mask = causal_mask | (bq_t > (bq_k + max_causal_window_size))\n\n sim = sim.masked_fill(causal_mask, mask_value)\n del causal_mask\n\n # mask out padding value\n if self.autopad and needed_pad:\n pad_mask = bq_k == self.pad_value\n sim = sim.masked_fill(pad_mask, mask_value)\n del pad_mask\n\n if bin_attention_mask is not None:\n mask = bin_attention_mask.bool()\n batch = bin_attention_mask.shape[0]\n assert (b % batch) == 0\n\n h = b // bin_attention_mask.shape[0]\n\n if self.autopad:\n _, mask = pad_to_multiple(mask, self.window_size, dim=-1, value=False)\n\n mask = rearrange(mask, '... (w n) -> (...) w n', w = windows, n = self.window_size)\n mask = METHOD_NAME(mask, **{**look_around_kwargs, 'pad_value': False})\n mask = rearrange(mask, '... j -> ... 1 j')\n mask = repeat(mask, 'b ... -> (b h) ...', h = h)\n sim = sim.masked_fill(~mask, mask_value)\n del mask\n\n # attention\n attn = sim.softmax(dim = -1)\n attn = self.dropout(attn)\n\n # aggregation\n out = einsum('b h i j, b h j e -> b h i e', attn, bv)\n out = rearrange(out, 'b w n d -> b (w n) d')\n\n if self.autopad:\n out = out[:, :orig_seq_len, :]\n\n out, *_ = unpack(out, packed_shape, '* n d')\n return out\n\n\nclass LocalMHA(nn.Module):\n def __init__(\n self,\n arch_config: ArchConfig,\n hidden_size: int,\n total_heads: int,\n op_heads: int,\n att_dropout = 0.,\n prenorm = False,\n use_rotary: bool = True,\n **kwargs\n ):\n super().__init__()\n assert hidden_size % total_heads == 0, 'hidden size must be divisible by total heads'\n\n self.hidden_size = hidden_size\n self.total_heads = total_heads\n self.op_heads = op_heads\n\n head_size = self.hidden_size // self.total_heads\n self.op_size = head_size * self.op_heads\n\n self.norm = nn.LayerNorm(hidden_size) if prenorm else None\n self.to_qkv = nn.Linear(hidden_size, self.op_size * 3, bias = False)\n\n self.attn_fn = LocalAttention(\n window_size = arch_config.pick('window_size'),\n causal = True,\n autopad = True,\n exact_windowsize = True,\n dropout=att_dropout,\n rel_pos_emb_dim=(head_size if use_rotary else None),\n **kwargs\n )\n\n def forward(self, hidden_states, bin_attention_mask: Optional[torch.LongTensor] = None, **kwargs):\n if self.norm is not None:\n hidden_states = self.norm(hidden_states)\n\n q, k, v = self.to_qkv(hidden_states).chunk(3, dim = -1)\n q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = self.op_heads), (q, k, v)) \n\n out = self.attn_fn(q, k, v, bin_attention_mask=bin_attention_mask)\n out = rearrange(out, 'b h n d -> b n (h d)')\n \n return out, 
None"},"code_compressed":{"kind":"null"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":14,"numItemsPerPage":100,"numTotalItems":6000,"offset":1400,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1Nzg3ODkwMCwic3ViIjoiL2RhdGFzZXRzL3N0YXMxay9sbG0tYm9vdGNhbXAtdGVzdCIsImV4cCI6MTc1Nzg4MjUwMCwiaXNzIjoiaHR0cHM6Ly9odWdnaW5nZmFjZS5jbyJ9.cuomTRZgsh2A3boVIb8niylEpERkqXw80I0GATq_HBf6v6cB7zPScYTdR4hlwZErFJRGL3_No38wXwq1yJpCDw","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
Columns: id (int64, 0-6k) | code (string, lengths 4k-8k) | code_compressed (null)
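Pages of rows like the ones below can also be pulled through the Hub's public datasets-server /rows endpoint. The dataset name comes from this page; config="default" and split="train" are assumptions:

import requests

API = "https://datasets-server.huggingface.co/rows"

def fetch_page(offset: int = 1400, length: int = 100) -> list:
    params = {
        "dataset": "stas1k/llm-bootcamp-test",
        "config": "default",   # assumed
        "split": "train",      # assumed
        "offset": offset,
        "length": length,      # the endpoint caps length at 100
    }
    resp = requests.get(API, params=params, timeout=30)
    resp.raise_for_status()
    # Each entry is {"row_idx": ..., "row": {"id": ..., "code": ...}, ...}
    return resp.json()["rows"]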
1,400
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data

class UpdateTrafficMirrorFilterRuleAttributeRequest(RpcRequest):

	def __init__(self):
		RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'UpdateTrafficMirrorFilterRuleAttribute','vpc')
		self.set_method('POST')

		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_SourcePortRange(self): # String
		return self.get_query_params().get('SourcePortRange')

	def set_SourcePortRange(self, SourcePortRange): # String
		self.add_query_param('SourcePortRange', SourcePortRange)
	def get_ResourceOwnerId(self): # Long
		return self.get_query_params().get('ResourceOwnerId')

	def set_ResourceOwnerId(self, ResourceOwnerId): # Long
		self.add_query_param('ResourceOwnerId', ResourceOwnerId)
	def get_DestinationPortRange(self): # String
		return self.get_query_params().get('DestinationPortRange')

	def set_DestinationPortRange(self, DestinationPortRange): # String
		self.add_query_param('DestinationPortRange', DestinationPortRange)
	def get_ClientToken(self): # String
		return self.get_query_params().get('ClientToken')

	def set_ClientToken(self, ClientToken): # String
		self.add_query_param('ClientToken', ClientToken)
	def get_RuleAction(self): # String
		return self.get_query_params().get('RuleAction')

	def set_RuleAction(self, RuleAction): # String
		self.add_query_param('RuleAction', RuleAction)
	def get_Protocol(self): # String
		return self.get_query_params().get('Protocol')

	def set_Protocol(self, Protocol): # String
		self.add_query_param('Protocol', Protocol)
	def get_SourceCidrBlock(self): # String
		return self.get_query_params().get('SourceCidrBlock')

	def set_SourceCidrBlock(self, SourceCidrBlock): # String
		self.add_query_param('SourceCidrBlock', SourceCidrBlock)
	def get_DryRun(self): # Boolean
		return self.get_query_params().get('DryRun')

	def set_DryRun(self, DryRun): # Boolean
		self.add_query_param('DryRun', DryRun)
	def get_ResourceOwnerAccount(self): # String
		return self.get_query_params().get('ResourceOwnerAccount')

	def set_ResourceOwnerAccount(self, ResourceOwnerAccount): # String
		self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)
	def get_DestinationCidrBlock(self): # String
		return self.get_query_params().get('DestinationCidrBlock')

	def set_DestinationCidrBlock(self, DestinationCidrBlock): # String
		self.add_query_param('DestinationCidrBlock', DestinationCidrBlock)
	def get_OwnerAccount(self): # String
		return self.get_query_params().get('OwnerAccount')

	def METHOD_NAME(self, OwnerAccount): # String
		self.add_query_param('OwnerAccount', OwnerAccount)
	def get_Priority(self): # Integer
		return self.get_query_params().get('Priority')

	def set_Priority(self, Priority): # Integer
		self.add_query_param('Priority', Priority)
	def get_OwnerId(self): # Long
		return self.get_query_params().get('OwnerId')

	def set_OwnerId(self, OwnerId): # Long
		self.add_query_param('OwnerId', OwnerId)
	def get_TrafficMirrorFilterRuleId(self): # String
		return self.get_query_params().get('TrafficMirrorFilterRuleId')

	def set_TrafficMirrorFilterRuleId(self, TrafficMirrorFilterRuleId): # String
		self.add_query_param('TrafficMirrorFilterRuleId', TrafficMirrorFilterRuleId)
null
1,401
""" testing models """ from unittest.mock import patch from django.test import TestCase from bookwyrm import models class Notification(TestCase): """let people know things""" def setUp(self): # pylint: disable=invalid-name """useful things for creating a notification""" with patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"), patch( "bookwyrm.activitystreams.populate_stream_task.delay" ), patch("bookwyrm.lists_stream.populate_lists_task.delay"): self.local_user = models.User.objects.create_user( "mouse", "[email protected]", "mouseword", local=True, localname="mouse" ) self.another_user = models.User.objects.create_user( "rat", "[email protected]", "ratword", local=True, localname="rat" ) with patch("bookwyrm.models.user.set_remote_server.delay"): self.remote_user = models.User.objects.create_user( "rat", "[email protected]", "ratword", local=False, remote_id="https://example.com/users/rat", inbox="https://example.com/users/rat/inbox", outbox="https://example.com/users/rat/outbox", ) self.work = models.Work.objects.create(title="Test Work") self.book = models.Edition.objects.create( title="Test Book", isbn_13="1234567890123", remote_id="https://example.com/book/1", parent_work=self.work, ) self.another_book = models.Edition.objects.create( title="Second Test Book", parent_work=models.Work.objects.create(title="Test Work"), ) def test_notification(self): """New notifications are unread""" notification = models.Notification.objects.create( user=self.local_user, notification_type=models.Notification.FAVORITE ) self.assertFalse(notification.read) def test_notify(self): """Create a notification""" models.Notification.notify( self.local_user, self.remote_user, notification_type=models.Notification.FAVORITE, ) self.assertTrue(models.Notification.objects.exists()) def test_notify_grouping(self): """Bundle notifications""" models.Notification.notify( self.local_user, self.remote_user, notification_type=models.Notification.FAVORITE, ) self.assertEqual(models.Notification.objects.count(), 1) notification = models.Notification.objects.get() self.assertEqual(notification.related_users.count(), 1) models.Notification.notify( self.local_user, self.another_user, notification_type=models.Notification.FAVORITE, ) self.assertEqual(models.Notification.objects.count(), 1) notification.refresh_from_db() self.assertEqual(notification.related_users.count(), 2) def test_notify_grouping_with_dupes(self): """If there are multiple options to group with, don't cause an error""" models.Notification.objects.create( user=self.local_user, notification_type="FAVORITE" ) models.Notification.objects.create( user=self.local_user, notification_type="FAVORITE" ) models.Notification.notify(self.local_user, None, notification_type="FAVORITE") self.assertEqual(models.Notification.objects.count(), 2) def METHOD_NAME(self): """Don't create notifications for remote users""" models.Notification.notify( self.remote_user, self.local_user, notification_type=models.Notification.FAVORITE, ) self.assertFalse(models.Notification.objects.exists()) def test_notify_self(self): """Don't create notifications for yourself""" models.Notification.notify( self.local_user, self.local_user, notification_type=models.Notification.FAVORITE, ) self.assertFalse(models.Notification.objects.exists()) @patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async") @patch("bookwyrm.lists_stream.remove_list_task.delay") def test_notify_list_item_own_list(self, *_): """Don't add list item notification for your own list""" test_list = 
models.List.objects.create(user=self.local_user, name="hi") models.ListItem.objects.create( user=self.local_user, book=self.book, book_list=test_list, order=1 ) self.assertFalse(models.Notification.objects.exists()) @patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async") @patch("bookwyrm.lists_stream.remove_list_task.delay") def test_notify_list_item_remote(self, *_): """Don't add list item notification for a remote user""" test_list = models.List.objects.create(user=self.remote_user, name="hi") models.ListItem.objects.create( user=self.local_user, book=self.book, book_list=test_list, order=1 ) self.assertFalse(models.Notification.objects.exists()) @patch("bookwyrm.models.activitypub_mixin.broadcast_task.apply_async") @patch("bookwyrm.lists_stream.remove_list_task.delay") def test_notify_list_item(self, *_): """Add list item notification""" test_list = models.List.objects.create(user=self.local_user, name="hi") list_item = models.ListItem.objects.create( user=self.remote_user, book=self.book, book_list=test_list, order=2 ) notification = models.Notification.objects.get() self.assertEqual(notification.related_users.count(), 1) self.assertEqual(notification.related_users.first(), self.remote_user) self.assertEqual(notification.related_list_items.count(), 1) self.assertEqual(notification.related_list_items.first(), list_item) models.ListItem.objects.create( user=self.remote_user, book=self.another_book, book_list=test_list, order=3 ) notification = models.Notification.objects.get() self.assertEqual(notification.related_users.count(), 1) self.assertEqual(notification.related_users.first(), self.remote_user) self.assertEqual(notification.related_list_items.count(), 2) def test_unnotify(self): """Remove a notification""" models.Notification.notify( self.local_user, self.remote_user, notification_type=models.Notification.FAVORITE, ) self.assertTrue(models.Notification.objects.exists()) models.Notification.unnotify( self.local_user, self.remote_user, notification_type=models.Notification.FAVORITE, ) self.assertFalse(models.Notification.objects.exists()) def test_unnotify_multiple_users(self): """Remove a notification""" models.Notification.notify( self.local_user, self.remote_user, notification_type=models.Notification.FAVORITE, ) models.Notification.notify( self.local_user, self.another_user, notification_type=models.Notification.FAVORITE, ) self.assertTrue(models.Notification.objects.exists()) models.Notification.unnotify( self.local_user, self.remote_user, notification_type=models.Notification.FAVORITE, ) self.assertTrue(models.Notification.objects.exists()) models.Notification.unnotify( self.local_user, self.another_user, notification_type=models.Notification.FAVORITE, ) self.assertFalse(models.Notification.objects.exists())
null
1,402
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest

class InsertMaterialRequest(RpcRequest):

	def __init__(self):
		RpcRequest.__init__(self, 'Trademark', '2018-07-24', 'InsertMaterial','trademark')

	def get_ContactEmail(self):
		return self.get_query_params().get('ContactEmail')

	def set_ContactEmail(self,ContactEmail):
		self.add_query_param('ContactEmail',ContactEmail)
	def get_ContactAddress(self):
		return self.get_query_params().get('ContactAddress')

	def set_ContactAddress(self,ContactAddress):
		self.add_query_param('ContactAddress',ContactAddress)
	def get_EAddress(self):
		return self.get_query_params().get('EAddress')

	def set_EAddress(self,EAddress):
		self.add_query_param('EAddress',EAddress)
	def get_Country(self):
		return self.get_query_params().get('Country')

	def set_Country(self,Country):
		self.add_query_param('Country',Country)
	def get_LegalNoticeOssKey(self):
		return self.get_query_params().get('LegalNoticeOssKey')

	def set_LegalNoticeOssKey(self,LegalNoticeOssKey):
		self.add_query_param('LegalNoticeOssKey',LegalNoticeOssKey)
	def get_Address(self):
		return self.get_query_params().get('Address')

	def set_Address(self,Address):
		self.add_query_param('Address',Address)
	def get_Town(self):
		return self.get_query_params().get('Town')

	def set_Town(self,Town):
		self.add_query_param('Town',Town)
	def get_ContactNumber(self):
		return self.get_query_params().get('ContactNumber')

	def set_ContactNumber(self,ContactNumber):
		self.add_query_param('ContactNumber',ContactNumber)
	def get_City(self):
		return self.get_query_params().get('City')

	def set_City(self,City):
		self.add_query_param('City',City)
	def get_IdCardOssKey(self):
		return self.get_query_params().get('IdCardOssKey')

	def set_IdCardOssKey(self,IdCardOssKey):
		self.add_query_param('IdCardOssKey',IdCardOssKey)
	def get_Type(self):
		return self.get_query_params().get('Type')

	def METHOD_NAME(self,Type):
		self.add_query_param('Type',Type)
	def get_ContactName(self):
		return self.get_query_params().get('ContactName')

	def set_ContactName(self,ContactName):
		self.add_query_param('ContactName',ContactName)
	def get_PassportOssKey(self):
		return self.get_query_params().get('PassportOssKey')

	def set_PassportOssKey(self,PassportOssKey):
		self.add_query_param('PassportOssKey',PassportOssKey)
	def get_ContactZipcode(self):
		return self.get_query_params().get('ContactZipcode')

	def set_ContactZipcode(self,ContactZipcode):
		self.add_query_param('ContactZipcode',ContactZipcode)
	def get_EName(self):
		return self.get_query_params().get('EName')

	def set_EName(self,EName):
		self.add_query_param('EName',EName)
	def get_Province(self):
		return self.get_query_params().get('Province')

	def set_Province(self,Province):
		self.add_query_param('Province',Province)
	def get_BusinessLicenceOssKey(self):
		return self.get_query_params().get('BusinessLicenceOssKey')

	def set_BusinessLicenceOssKey(self,BusinessLicenceOssKey):
		self.add_query_param('BusinessLicenceOssKey',BusinessLicenceOssKey)
	def get_Name(self):
		return self.get_query_params().get('Name')

	def set_Name(self,Name):
		self.add_query_param('Name',Name)
	def get_CardNumber(self):
		return self.get_query_params().get('CardNumber')

	def set_CardNumber(self,CardNumber):
		self.add_query_param('CardNumber',CardNumber)
	def get_Region(self):
		return self.get_query_params().get('Region')

	def set_Region(self,Region):
		self.add_query_param('Region',Region)
	def get_LoaOssKey(self):
		return self.get_query_params().get('LoaOssKey')

	def set_LoaOssKey(self,LoaOssKey):
		self.add_query_param('LoaOssKey',LoaOssKey)
null
1,403
from galaxy.selenium.navigates_galaxy import edit_details

from .framework import (
    retry_assertion_during_transitions,
    selenium_test,
    SeleniumTestCase,
)

NEW_HISTORY_NAME = "New History Name"
HISTORY_PANEL_AXE_IMPACT_LEVEL = "moderate"


class TestHistoryPanel(SeleniumTestCase):
    ensure_registered = True

    @selenium_test
    def test_history_panel_landing_state(self):
        self.assert_initial_history_panel_state_correct()
        editor = self.components.history_panel.editor.selector(scope=".history-index")
        self.components.history_panel._.assert_no_axe_violations_with_impact_of_at_least(HISTORY_PANEL_AXE_IMPACT_LEVEL)
        toggle = editor.toggle
        toggle.wait_for_visible()

    @selenium_test
    def test_history_panel_rename(self):
        self.history_panel_rename(NEW_HISTORY_NAME)
        self.assert_name_changed()

    @selenium_test
    def test_history_rename_cancel_with_escape(self):
        self.open_history_editor()
        editable_text_input_element = self.history_panel_name_input()
        editable_text_input_element.send_keys(NEW_HISTORY_NAME)
        self.components.history_panel._.assert_no_axe_violations_with_impact_of_at_least(HISTORY_PANEL_AXE_IMPACT_LEVEL)
        self.send_escape(editable_text_input_element)
        self.components.history_panel.name_edit_input.wait_for_absent_or_hidden()
        assert NEW_HISTORY_NAME not in self.history_panel_name()

    @selenium_test
    @edit_details
    def test_history_tags_and_annotations_buttons(self):
        history_editor = self.components.history_panel.editor.selector(scope=".history-index")
        history_editor.annotation_input.wait_for_clickable()
        history_editor.tags_input.wait_for_clickable()

    @selenium_test
    def test_history_panel_annotations_change(self):
        history_panel = self.components.history_panel

        @retry_assertion_during_transitions
        def assert_current_annotation(expected, error_message="History annotation", is_equal=True):
            text_component = history_panel.annotation_editable_text
            current_annotation = text_component.wait_for_visible()
            error_message += " given: [%s] expected [%s] "
            if is_equal:
                assert current_annotation.text == expected, error_message % (current_annotation.text, expected)
            else:
                assert current_annotation.text != expected, error_message % (current_annotation.text, expected)

        def set_random_annotation(clear_text=True):
            random_annotation = self._get_random_name(prefix="arbitrary_annotation_")
            self.set_history_annotation(random_annotation, clear_text)
            return random_annotation

        # assert that annotation wasn't set before
        history_panel.annotation_area.assert_absent_or_hidden()
        # assign annotation random text
        initial_annotation = set_random_annotation()
        assert_current_annotation(initial_annotation)
        # change annotation text
        changed_annotation = set_random_annotation()
        assert_current_annotation(
            initial_annotation, error_message="History annotation was not changed!", is_equal=False
        )
        assert_current_annotation(
            changed_annotation,
            error_message="History annotation was changed, but annotation text is wrong!",
            is_equal=True,
        )

    @selenium_test
    def test_history_panel_tags_change(self):
        def create_tags(size):
            history_panel_tags = list()
            for i in range(size):
                history_panel_tags.append(self._get_random_name(prefix="arbitrary_tag_%s_") % i)
            return history_panel_tags

        def add_tags(tags_size):
            tags = create_tags(tags_size)
            self.history_panel_add_tags(tags)
            return tags

        # check tags against list
        def assert_current_tags(expected_tags):
            current_tags = self.open_tags()
            errmsg = f"tags [{current_tags}] are not the same as expected [{expected_tags}]"
            assert [tag.text for tag in current_tags.all()] == expected_tags, errmsg

        # assert that the tag editor display is absent or hidden, i.e. no tags are set
        def assert_no_tags():
            tags_component = self.components.history_panel.tag_editor.selector(scope=".history-index")
            tags_component.display.assert_absent_or_hidden()

        assert_no_tags()
        # add new tags to empty tags area
        tags_size = 6
        tags = add_tags(tags_size)
        assert_current_tags(tags)
        # add more tags to non-empty tags area
        tags += add_tags(tags_size)
        self.sleep_for(self.wait_types.UX_RENDER)
        tags.sort()
        assert_current_tags(tags)
        # delete all tags
        expected_tags_len = len(tags)
        self.clear_tags(expected_tags_len)
        self.sleep_for(self.wait_types.UX_RENDER)
        assert_no_tags()

    # after about 5 tags, a toggle link shows up and you have to click it to see the full list
    def open_tags(self):
        tags_component = self.components.history_panel.tag_editor.selector(scope=".history-index")
        if tags_component.tag_area.is_absent:
            tags_component.toggle.wait_for_and_click()
            tags_component.display.wait_for_visible()
        return tags_component.display

    @edit_details
    def clear_tags(self, expected_tags_size):
        self.open_tags()
        tags = self.components.history_panel.tag_editor.selector(scope=".history-index")
        close_tag_buttons = tags.tag_close_btn.all()
        current_tags_size = len(close_tag_buttons)
        errmsg = f"there are more tags than expected! current {current_tags_size}, expected {expected_tags_size}"
        assert expected_tags_size == current_tags_size, errmsg
        for close_btn in reversed(close_tag_buttons):
            close_btn.click()
            self.sleep_for(self.wait_types.UX_RENDER)

    @selenium_test
    def test_refresh_preserves_state(self):
        self.perform_upload(self.get_filename("1.txt"))
        self.wait_for_history()
        # Open the details, verify they are open and do a refresh.
        self.history_panel_ensure_showing_item_details(hid=1)
        self.history_panel_item_body_component(1, wait=True)
        self.METHOD_NAME()
        self.wait_for_history()
        # After the refresh, verify the details are still open.
        self.sleep_for(self.wait_types.UX_TRANSITION)
        self.wait_for_selector_clickable(self.history_panel_item_selector(hid=1))
        assert self.history_panel_item_showing_details(hid=1)
        # Close the detailed display, refresh, and ensure they are still closed.
        self.history_panel_click_item_title(hid=1, wait=False)
        assert not self.history_panel_item_showing_details(hid=1)
        self.METHOD_NAME()
        self.sleep_for(self.wait_types.UX_TRANSITION)
        self.wait_for_selector_clickable(self.history_panel_item_selector(hid=1))
        assert not self.history_panel_item_showing_details(hid=1)

    @retry_assertion_during_transitions
    def assert_name_changed(self):
        name = self.history_panel_name()
        assert name == NEW_HISTORY_NAME

    def METHOD_NAME(self):
        self.home()
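Editor's note: the test above leans on retry_assertion_during_transitions from .framework to absorb DOM flakiness. Below is a minimal, hypothetical sketch of such a retry decorator, added only for illustration; it is not Galaxy's actual implementation, and the attempt count and delay are assumptions.

import functools
import time


def retry_assertion_sketch(func, attempts=5, delay=0.25):
    # Hypothetical stand-in for retry_assertion_during_transitions: re-run the
    # assertion while the UI may still be transitioning.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        for attempt in range(attempts):
            try:
                return func(*args, **kwargs)
            except AssertionError:
                if attempt == attempts - 1:
                    raise  # out of retries; surface the real failure
                time.sleep(delay)
    return wrapper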
1,404
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest


class PutCustomMetricRuleRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Cms', '2019-01-01', 'PutCustomMetricRule', 'cms')
        self.set_method('POST')

    def get_Webhook(self):  # String
        return self.get_query_params().get('Webhook')

    def set_Webhook(self, Webhook):  # String
        self.add_query_param('Webhook', Webhook)

    def get_RuleName(self):  # String
        return self.get_query_params().get('RuleName')

    def set_RuleName(self, RuleName):  # String
        self.add_query_param('RuleName', RuleName)

    def get_Threshold(self):  # String
        return self.get_query_params().get('Threshold')

    def set_Threshold(self, Threshold):  # String
        self.add_query_param('Threshold', Threshold)

    def get_EffectiveInterval(self):  # String
        return self.get_query_params().get('EffectiveInterval')

    def set_EffectiveInterval(self, EffectiveInterval):  # String
        self.add_query_param('EffectiveInterval', EffectiveInterval)

    def get_EmailSubject(self):  # String
        return self.get_query_params().get('EmailSubject')

    def set_EmailSubject(self, EmailSubject):  # String
        self.add_query_param('EmailSubject', EmailSubject)

    def get_EvaluationCount(self):  # Integer
        return self.get_query_params().get('EvaluationCount')

    def set_EvaluationCount(self, EvaluationCount):  # Integer
        self.add_query_param('EvaluationCount', EvaluationCount)

    def get_SilenceTime(self):  # Integer
        return self.get_query_params().get('SilenceTime')

    def set_SilenceTime(self, SilenceTime):  # Integer
        self.add_query_param('SilenceTime', SilenceTime)

    def get_MetricName(self):  # String
        return self.get_query_params().get('MetricName')

    def set_MetricName(self, MetricName):  # String
        self.add_query_param('MetricName', MetricName)

    def get_Period(self):  # String
        return self.get_query_params().get('Period')

    def set_Period(self, Period):  # String
        self.add_query_param('Period', Period)

    def get_ContactGroups(self):  # String
        return self.get_query_params().get('ContactGroups')

    def set_ContactGroups(self, ContactGroups):  # String
        self.add_query_param('ContactGroups', ContactGroups)

    def get_Level(self):  # String
        return self.get_query_params().get('Level')

    def set_Level(self, Level):  # String
        self.add_query_param('Level', Level)

    def get_GroupId(self):  # String
        return self.get_query_params().get('GroupId')

    def set_GroupId(self, GroupId):  # String
        self.add_query_param('GroupId', GroupId)

    def METHOD_NAME(self):  # String
        return self.get_query_params().get('Resources')

    def set_Resources(self, Resources):  # String
        self.add_query_param('Resources', Resources)

    def get_RuleId(self):  # String
        return self.get_query_params().get('RuleId')

    def set_RuleId(self, RuleId):  # String
        self.add_query_param('RuleId', RuleId)

    def get_ComparisonOperator(self):  # String
        return self.get_query_params().get('ComparisonOperator')

    def set_ComparisonOperator(self, ComparisonOperator):  # String
        self.add_query_param('ComparisonOperator', ComparisonOperator)

    def get_Statistics(self):  # String
        return self.get_query_params().get('Statistics')

    def set_Statistics(self, Statistics):  # String
        self.add_query_param('Statistics', Statistics)
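Usage sketch (editor's addition, not part of the file above): a request like this is populated through its setters and dispatched with aliyunsdkcore's AcsClient. The credentials, region, and all parameter values below are placeholders.

from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')  # placeholders

request = PutCustomMetricRuleRequest()
request.set_RuleName('cpu-high')                         # placeholder rule name
request.set_MetricName('cpu_usage')                      # placeholder custom metric
request.set_Threshold('90')
request.set_ComparisonOperator('GreaterThanThreshold')
request.set_ContactGroups('ops-team')                    # placeholder contact group

response = client.do_action_with_exception(request)
print(response)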
1,405
"""Utilities for constructing Galaxy integration tests. Tests that start an actual Galaxy server with a particular configuration in order to test something that cannot be tested with the default functional/api testing configuration. """ import os import re from typing import ( ClassVar, Iterator, Optional, Type, TYPE_CHECKING, TypeVar, ) from unittest import ( skip, SkipTest, ) import pytest from galaxy.app import UniverseApplication from galaxy.tool_util.verify.test_data import TestDataResolver from galaxy.util import safe_makedirs from galaxy.util.unittest import TestCase from galaxy.util.unittest_utils import ( _identity, skip_unless_executable, ) from galaxy_test.base.api import ( UsesApiTestCaseMixin, UsesCeleryTasks, ) from .driver_util import GalaxyTestDriver if TYPE_CHECKING: from galaxy_test.base.populators import BaseDatasetPopulator NO_APP_MESSAGE = "test_case._app called though no Galaxy has been configured." # Following should be for Homebrew Rabbitmq and Docker on Mac "amqp://guest:guest@localhost:5672//" AMQP_URL = os.environ.get("GALAXY_TEST_AMQP_URL", None) POSTGRES_CONFIGURED = "postgres" in os.environ.get("GALAXY_TEST_DBURI", "") SCRIPT_DIRECTORY = os.path.abspath(os.path.dirname(__file__)) VAULT_CONF = os.path.join(SCRIPT_DIRECTORY, "vault_conf.yml") def skip_if_jenkins(cls): if os.environ.get("BUILD_NUMBER", ""): return skip return cls def skip_unless_amqp(): if AMQP_URL is not None: return _identity return pytest.mark.skip("AMQP_URL is not set, required for this test.") def skip_unless_postgres(): if POSTGRES_CONFIGURED: return _identity return pytest.mark.skip("GALAXY_TEST_DBURI does not point to postgres database, required for this test.") def skip_unless_docker(): return skip_unless_executable("docker") def skip_unless_kubernetes(): return skip_unless_executable("kubectl") def k8s_config_path(): return os.environ.get("GALAXY_TEST_KUBE_CONFIG_PATH", "~/.kube/config") def skip_unless_fixed_port(): if os.environ.get("GALAXY_TEST_PORT_RANDOM") != "1": return _identity return pytest.mark.skip("GALAXY_TEST_PORT must be set for this test.") def skip_if_github_workflow(): if os.environ.get("GITHUB_ACTIONS", None) is None: return _identity return pytest.mark.skip("This test is skipped for Github actions.") def skip_unless_environ(env_var): if os.environ.get(env_var): return _identity return pytest.mark.skip(f"{env_var} must be set for this test") class IntegrationInstance(UsesApiTestCaseMixin, UsesCeleryTasks): """Unit test case with utilities for spinning up Galaxy.""" _test_driver: GalaxyTestDriver # Optional in parent class, but required for integration tests. _app_available: ClassVar[bool] prefer_template_database = True # Don't pull in default configs for un-configured things from Galaxy's # config directory and such. isolate_galaxy_config = True dataset_populator: Optional["BaseDatasetPopulator"] @classmethod def setUpClass(cls): """Configure and start Galaxy for a test.""" cls._app_available = False cls._test_driver = GalaxyTestDriver() cls._prepare_galaxy() cls._test_driver.setup(config_object=cls) cls._app_available = True cls._configure_app() @classmethod def tearDownClass(cls): """Shutdown Galaxy server and cleanup temp directory.""" cls._test_driver.tear_down() cls._app_available = False def tearDown(self): logs = self._test_driver.get_logs() if logs: print(logs) return super().tearDown() def setUp(self): self.test_data_resolver = TestDataResolver() self._configure_interactor() def _configure_interactor(self): # Setup attributes needed for API testing... 
server_wrapper = self._test_driver.server_wrappers[0] host = server_wrapper.host port = server_wrapper.port prefix = server_wrapper.prefix or "" self.url = f"http://{host}:{port}{prefix.rstrip('/')}/" self._setup_interactor() def restart(self, handle_reconfig=None): self._test_driver.restart(config_object=self.__class__, handle_config=handle_reconfig) self._configure_app() self._configure_interactor() @property def _app(self) -> UniverseApplication: assert self._app_available, NO_APP_MESSAGE app = self._test_driver.app assert app, NO_APP_MESSAGE return app @property def METHOD_NAME(self): return self._test_driver.galaxy_test_tmp_dir @classmethod def _prepare_galaxy(cls): """Extension point for subclasses called before Galaxy is launched.""" @classmethod def _configure_app(cls): """Extension point for subclasses called after Galaxy is launched. ```self._app``` can be used to access Galaxy core app. """ def _skip_unless_postgres(self): if not self._app.config.database_connection.startswith("post"): raise SkipTest("Test only valid for postgres") def _run_tool_test(self, *args, **kwargs): return self._test_driver.run_tool_test(*args, **kwargs) @classmethod def temp_config_dir(cls, name): # realpath here to get around problems with symlinks being blocked. return os.path.realpath(os.path.join(cls._test_driver.galaxy_test_tmp_dir, name)) @pytest.fixture def history_id(self) -> Iterator[str]: assert self.dataset_populator with self.dataset_populator.test_history() as history_id: yield history_id class IntegrationTestCase(IntegrationInstance, TestCase): """Unit TestCase with utilities for spinning up Galaxy.""" IntegrationInstanceObject = TypeVar("IntegrationInstanceObject", bound=IntegrationInstance) def integration_module_instance(clazz: Type[IntegrationInstanceObject]): def _instance() -> Iterator[IntegrationInstanceObject]: instance = clazz() instance.setUpClass() instance.setUp() yield instance instance.tearDownClass() return pytest.fixture(scope="module")(_instance) def integration_tool_runner(tool_ids): def test_tools(instance, tool_id): instance._run_tool_test(tool_id) return pytest.mark.parametrize("tool_id", tool_ids)(test_tools) class ConfiguresObjectStores: object_stores_parent: ClassVar[str] _test_driver: GalaxyTestDriver @classmethod def _configure_object_store(cls, template, config): temp_directory = cls._test_driver.mkdtemp() cls.object_stores_parent = temp_directory config_path = os.path.join(temp_directory, "object_store_conf.xml") xml = template.safe_substitute({"temp_directory": temp_directory}) with open(config_path, "w") as f: f.write(xml) config["object_store_config_file"] = config_path for path in re.findall(r'files_dir path="([^"]*)"', xml): assert path.startswith(temp_directory) dir_name = os.path.basename(path) os.path.join(temp_directory, dir_name) safe_makedirs(path) setattr(cls, f"{dir_name}_path", path) class ConfiguresDatabaseVault: @classmethod def _configure_database_vault(cls, config): config["vault_config_file"] = VAULT_CONF
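Illustrative sketch (editor's addition): a concrete integration test would subclass IntegrationTestCase and hook the _prepare_galaxy / _configure_app extension points shown above. The _get helper used in the test body is an assumption about what UsesApiTestCaseMixin provides.

@skip_unless_postgres()
class ExampleIntegrationTestCase(IntegrationTestCase):

    @classmethod
    def _prepare_galaxy(cls):
        # Runs before the server starts; the test driver already exists here.
        safe_makedirs(cls.temp_config_dir("example"))

    @classmethod
    def _configure_app(cls):
        # Runs after launch; cls._app exposes the live UniverseApplication.
        assert cls._app.config is not None

    def test_server_responds(self):
        # _get is assumed to be provided by UsesApiTestCaseMixin.
        response = self._get("version")
        assert response.status_code == 200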
1,406
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkcbn.endpoint import endpoint_data


class UpdateTrafficMarkingPolicyAttributeRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Cbn', '2017-09-12', 'UpdateTrafficMarkingPolicyAttribute')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_ClientToken(self):  # String
        return self.get_query_params().get('ClientToken')

    def set_ClientToken(self, ClientToken):  # String
        self.add_query_param('ClientToken', ClientToken)

    def get_AddTrafficMatchRuless(self):  # RepeatList
        return self.get_query_params().get('AddTrafficMatchRules')

    def set_AddTrafficMatchRuless(self, AddTrafficMatchRules):  # RepeatList
        for depth1 in range(len(AddTrafficMatchRules)):
            if AddTrafficMatchRules[depth1].get('DstPortRange') is not None:
                for depth2 in range(len(AddTrafficMatchRules[depth1].get('DstPortRange'))):
                    self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.DstPortRange.' + str(depth2 + 1), AddTrafficMatchRules[depth1].get('DstPortRange')[depth2])
            if AddTrafficMatchRules[depth1].get('MatchDscp') is not None:
                self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.MatchDscp', AddTrafficMatchRules[depth1].get('MatchDscp'))
            if AddTrafficMatchRules[depth1].get('Protocol') is not None:
                self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.Protocol', AddTrafficMatchRules[depth1].get('Protocol'))
            if AddTrafficMatchRules[depth1].get('TrafficMatchRuleDescription') is not None:
                self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.TrafficMatchRuleDescription', AddTrafficMatchRules[depth1].get('TrafficMatchRuleDescription'))
            if AddTrafficMatchRules[depth1].get('SrcPortRange') is not None:
                for depth2 in range(len(AddTrafficMatchRules[depth1].get('SrcPortRange'))):
                    self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.SrcPortRange.' + str(depth2 + 1), AddTrafficMatchRules[depth1].get('SrcPortRange')[depth2])
            if AddTrafficMatchRules[depth1].get('DstCidr') is not None:
                self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.DstCidr', AddTrafficMatchRules[depth1].get('DstCidr'))
            if AddTrafficMatchRules[depth1].get('TrafficMatchRuleName') is not None:
                self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.TrafficMatchRuleName', AddTrafficMatchRules[depth1].get('TrafficMatchRuleName'))
            if AddTrafficMatchRules[depth1].get('SrcCidr') is not None:
                self.add_query_param('AddTrafficMatchRules.' + str(depth1 + 1) + '.SrcCidr', AddTrafficMatchRules[depth1].get('SrcCidr'))

    def get_TrafficMarkingPolicyDescription(self):  # String
        return self.get_query_params().get('TrafficMarkingPolicyDescription')

    def set_TrafficMarkingPolicyDescription(self, TrafficMarkingPolicyDescription):  # String
        self.add_query_param('TrafficMarkingPolicyDescription', TrafficMarkingPolicyDescription)

    def get_TrafficMarkingPolicyId(self):  # String
        return self.get_query_params().get('TrafficMarkingPolicyId')

    def set_TrafficMarkingPolicyId(self, TrafficMarkingPolicyId):  # String
        self.add_query_param('TrafficMarkingPolicyId', TrafficMarkingPolicyId)

    def get_TrafficMarkingPolicyName(self):  # String
        return self.get_query_params().get('TrafficMarkingPolicyName')

    def set_TrafficMarkingPolicyName(self, TrafficMarkingPolicyName):  # String
        self.add_query_param('TrafficMarkingPolicyName', TrafficMarkingPolicyName)

    def get_DryRun(self):  # Boolean
        return self.get_query_params().get('DryRun')

    def set_DryRun(self, DryRun):  # Boolean
        self.add_query_param('DryRun', DryRun)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def METHOD_NAME(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_DeleteTrafficMatchRuless(self):  # RepeatList
        return self.get_query_params().get('DeleteTrafficMatchRules')

    def set_DeleteTrafficMatchRuless(self, DeleteTrafficMatchRules):  # RepeatList
        for depth1 in range(len(DeleteTrafficMatchRules)):
            if DeleteTrafficMatchRules[depth1].get('DstPortRange') is not None:
                for depth2 in range(len(DeleteTrafficMatchRules[depth1].get('DstPortRange'))):
                    self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.DstPortRange.' + str(depth2 + 1), DeleteTrafficMatchRules[depth1].get('DstPortRange')[depth2])
            if DeleteTrafficMatchRules[depth1].get('MatchDscp') is not None:
                self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.MatchDscp', DeleteTrafficMatchRules[depth1].get('MatchDscp'))
            if DeleteTrafficMatchRules[depth1].get('Protocol') is not None:
                self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.Protocol', DeleteTrafficMatchRules[depth1].get('Protocol'))
            if DeleteTrafficMatchRules[depth1].get('TrafficMatchRuleDescription') is not None:
                self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.TrafficMatchRuleDescription', DeleteTrafficMatchRules[depth1].get('TrafficMatchRuleDescription'))
            if DeleteTrafficMatchRules[depth1].get('SrcPortRange') is not None:
                for depth2 in range(len(DeleteTrafficMatchRules[depth1].get('SrcPortRange'))):
                    self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.SrcPortRange.' + str(depth2 + 1), DeleteTrafficMatchRules[depth1].get('SrcPortRange')[depth2])
            if DeleteTrafficMatchRules[depth1].get('DstCidr') is not None:
                self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.DstCidr', DeleteTrafficMatchRules[depth1].get('DstCidr'))
            if DeleteTrafficMatchRules[depth1].get('TrafficMatchRuleName') is not None:
                self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.TrafficMatchRuleName', DeleteTrafficMatchRules[depth1].get('TrafficMatchRuleName'))
            if DeleteTrafficMatchRules[depth1].get('SrcCidr') is not None:
                self.add_query_param('DeleteTrafficMatchRules.' + str(depth1 + 1) + '.SrcCidr', DeleteTrafficMatchRules[depth1].get('SrcCidr'))
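The RepeatList setters above flatten nested rule dictionaries into 1-indexed query parameters. This illustrative snippet (editor's addition; the policy ID and rule values are placeholders) shows the resulting keys:

request = UpdateTrafficMarkingPolicyAttributeRequest()
request.set_TrafficMarkingPolicyId('tm-xxxxxxxx')  # placeholder ID
request.set_AddTrafficMatchRuless([
    {
        'Protocol': 'TCP',
        'DstCidr': '10.0.0.0/24',
        'DstPortRange': [80, 80],
        'MatchDscp': 46,
    },
])
# The setter emits indexed parameters such as:
#   AddTrafficMatchRules.1.Protocol       = TCP
#   AddTrafficMatchRules.1.DstCidr        = 10.0.0.0/24
#   AddTrafficMatchRules.1.DstPortRange.1 = 80
#   AddTrafficMatchRules.1.DstPortRange.2 = 80
#   AddTrafficMatchRules.1.MatchDscp      = 46
print(request.get_query_params())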
1,407
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2020, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function

__metaclass__ = type

ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}

DOCUMENTATION = r"""
---
module: purefa_directory
version_added: '1.5.0'
short_description: Manage FlashArray File System Directories
description:
- Create/Delete FlashArray File Systems
author:
- Pure Storage Ansible Team (@sdodsley) <[email protected]>
options:
  name:
    description:
    - Name of the directory
    type: str
    required: true
  state:
    description:
    - Define whether the directory should exist or not.
    default: present
    choices: [ absent, present ]
    type: str
  filesystem:
    description:
    - Name of the filesystem the directory links to.
    type: str
    required: true
  path:
    description:
    - Path of the managed directory in the file system
    - If not provided will default to I(name)
    type: str
  rename:
    description:
    - Value to rename the specified directory to
    type: str
extends_documentation_fragment:
- purestorage.flasharray.purestorage.fa
"""

EXAMPLES = r"""
- name: Create directory foo in filesystem bar with path zeta
  purestorage.flasharray.purefa_directory:
    name: foo
    filesystem: bar
    path: zeta
    fa_url: 10.10.10.2
    api_token: e31060a7-21fc-e277-6240-25983c6c4592

- name: Rename directory foo to fin in filesystem bar
  purestorage.flasharray.purefa_directory:
    name: foo
    rename: fin
    filesystem: bar
    fa_url: 10.10.10.2
    api_token: e31060a7-21fc-e277-6240-25983c6c4592

- name: Delete directory foo in filesystem bar
  purestorage.flasharray.purefa_directory:
    name: foo
    filesystem: bar
    state: absent
    fa_url: 10.10.10.2
    api_token: e31060a7-21fc-e277-6240-25983c6c4592
"""

RETURN = r"""
"""

HAS_PURESTORAGE = True
try:
    from pypureclient import flasharray
except ImportError:
    HAS_PURESTORAGE = False

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.purestorage.flasharray.plugins.module_utils.purefa import (
    get_system,
    get_array,
    purefa_argument_spec,
)

MIN_REQUIRED_API_VERSION = "2.2"


def delete_dir(module, array):
    """Delete a file system directory"""
    changed = True
    if not module.check_mode:
        res = array.delete_directories(
            names=[module.params["filesystem"] + ":" + module.params["name"]]
        )
        if res.status_code != 200:
            module.fail_json(
                msg="Failed to delete file system {0}. {1}".format(
                    module.params["name"], res.errors[0].message
                )
            )
    module.exit_json(changed=changed)


def METHOD_NAME(module, array):
    """Rename a file system directory"""
    changed = False
    target = array.get_directories(
        names=[module.params["filesystem"] + ":" + module.params["rename"]]
    )
    if target.status_code != 200:
        if not module.check_mode:
            changed = True
            directory = flasharray.DirectoryPatch(
                name=module.params["filesystem"] + ":" + module.params["rename"]
            )
            res = array.patch_directories(
                names=[module.params["filesystem"] + ":" + module.params["name"]],
                directory=directory,
            )
            if res.status_code != 200:
                module.fail_json(
                    msg="Failed to rename file system {0}".format(module.params["name"])
                )
    else:
        module.fail_json(
            msg="Target file system {0} already exists".format(module.params["rename"])
        )
    module.exit_json(changed=changed)


def create_dir(module, array):
    """Create a file system directory"""
    changed = False
    if not module.params["path"]:
        module.params["path"] = module.params["name"]
    all_fs = list(
        array.get_directories(file_system_names=[module.params["filesystem"]]).items
    )
    for check in range(0, len(all_fs)):
        if module.params["path"] == all_fs[check].path[1:]:
            module.fail_json(
                msg="Path {0} already exists in file system {1}".format(
                    module.params["path"], module.params["filesystem"]
                )
            )
    changed = True
    if not module.check_mode:
        directory = flasharray.DirectoryPost(
            directory_name=module.params["name"], path=module.params["path"]
        )
        res = array.post_directories(
            file_system_names=[module.params["filesystem"]], directory=directory
        )
        if res.status_code != 200:
            module.fail_json(
                msg="Failed to create file system {0}. {1}".format(
                    module.params["name"], res.errors[0].message
                )
            )
    module.exit_json(changed=changed)


def main():
    argument_spec = purefa_argument_spec()
    argument_spec.update(
        dict(
            state=dict(type="str", default="present", choices=["absent", "present"]),
            filesystem=dict(type="str", required=True),
            name=dict(type="str", required=True),
            rename=dict(type="str"),
            path=dict(type="str"),
        )
    )

    module = AnsibleModule(argument_spec, supports_check_mode=True)

    if not HAS_PURESTORAGE:
        module.fail_json(msg="py-pure-client sdk is required for this module")

    array = get_system(module)
    api_version = array._list_available_rest_versions()
    if MIN_REQUIRED_API_VERSION not in api_version:
        module.fail_json(
            msg="FlashArray REST version not supported. "
            "Minimum version required: {0}".format(MIN_REQUIRED_API_VERSION)
        )
    array = get_array(module)
    state = module.params["state"]

    try:
        filesystem = list(
            array.get_file_systems(names=[module.params["filesystem"]]).items
        )[0]
    except Exception:
        module.fail_json(
            msg="Selected file system {0} does not exist".format(
                module.params["filesystem"]
            )
        )

    res = array.get_directories(
        names=[module.params["filesystem"] + ":" + module.params["name"]]
    )
    exists = bool(res.status_code == 200)

    if state == "present" and not exists:
        create_dir(module, array)
    elif (
        state == "present"
        and exists
        and module.params["rename"]
        and not filesystem.destroyed
    ):
        METHOD_NAME(module, array)
    elif state == "absent" and exists:
        delete_dir(module, array)

    module.exit_json(changed=False)


if __name__ == "__main__":
    main()
1,408
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#

import inspect
import os
import sys
import traceback
from time import sleep

scriptdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
maindir = os.path.abspath(os.path.join(scriptdir, '../../'))
sys.path.append(maindir)
transitionsdir = os.path.abspath(os.path.join(scriptdir, '../../transitions'))
sys.path.append(transitionsdir)

from oscrypto import *
from encryptstates import *
from Common import *
from CommandExecutor import *
from DiskUtil import *
from transitions import *


class RHEL68EncryptionStateMachine(OSEncryptionStateMachine):
    states = [
        State(name='uninitialized'),
        State(name='prereq', on_enter='on_enter_state'),
        State(name='selinux', on_enter='on_enter_state'),
        State(name='stripdown', on_enter='on_enter_state'),
        State(name='unmount_oldroot', on_enter='on_enter_state'),
        State(name='encrypt_block_device', on_enter='on_enter_state'),
        State(name='patch_boot_system', on_enter='on_enter_state'),
        State(name='completed'),
    ]

    transitions = [
        {'trigger': 'skip_encryption', 'source': 'uninitialized', 'dest': 'completed'},
        {'trigger': 'enter_prereq', 'source': 'uninitialized', 'dest': 'prereq'},
        {'trigger': 'enter_selinux', 'source': 'prereq', 'dest': 'selinux',
         'before': 'on_enter_state', 'conditions': 'should_exit_previous_state'},
        {'trigger': 'enter_stripdown', 'source': 'selinux', 'dest': 'stripdown',
         'before': 'on_enter_state', 'conditions': 'should_exit_previous_state'},
        {'trigger': 'enter_unmount_oldroot', 'source': 'stripdown', 'dest': 'unmount_oldroot',
         'before': 'on_enter_state', 'conditions': 'should_exit_previous_state'},
        {'trigger': 'retry_unmount_oldroot', 'source': 'unmount_oldroot', 'dest': 'unmount_oldroot',
         'before': 'on_enter_state'},
        {'trigger': 'enter_encrypt_block_device', 'source': 'unmount_oldroot', 'dest': 'encrypt_block_device',
         'before': 'on_enter_state', 'conditions': 'should_exit_previous_state'},
        {'trigger': 'enter_patch_boot_system', 'source': 'encrypt_block_device', 'dest': 'patch_boot_system',
         'before': 'on_enter_state', 'conditions': 'should_exit_previous_state'},
        {'trigger': 'stop_machine', 'source': 'patch_boot_system', 'dest': 'completed',
         'conditions': 'should_exit_previous_state'},
    ]

    def METHOD_NAME(self):
        super(RHEL68EncryptionStateMachine, self).METHOD_NAME()

    def should_exit_previous_state(self):
        # when this is called, self.state is still the "source" state in the transition
        return super(RHEL68EncryptionStateMachine, self).should_exit_previous_state()

    def __init__(self, hutil, distro_patcher, logger, encryption_environment):
        super(RHEL68EncryptionStateMachine, self).__init__(hutil, distro_patcher, logger, encryption_environment)

        self.state_objs = {
            'prereq': PrereqState(self.context),
            'selinux': SelinuxState(self.context),
            'stripdown': StripdownState(self.context),
            'unmount_oldroot': UnmountOldrootState(self.context),
            'encrypt_block_device': EncryptBlockDeviceState(self.context),
            'patch_boot_system': PatchBootSystemState(self.context),
        }

        self.state_machine = Machine(model=self,
                                     states=RHEL68EncryptionStateMachine.states,
                                     transitions=RHEL68EncryptionStateMachine.transitions,
                                     initial='uninitialized')

    def start_encryption(self):
        proc_comm = ProcessCommunicator()
        self.command_executor.Execute(command_to_execute="mount",
                                      raise_exception_on_failure=True,
                                      communicator=proc_comm)

        if '/dev/mapper/osencrypt' in proc_comm.stdout:
            self.logger.log("OS volume is already encrypted")
            self.skip_encryption()
            self.log_machine_state()
            return

        self.log_machine_state()
        self.enter_prereq()
        self.log_machine_state()
        self.enter_selinux()
        self.log_machine_state()
        self.enter_stripdown()
        self.log_machine_state()

        oldroot_unmounted_successfully = False
        attempt = 1

        while not oldroot_unmounted_successfully:
            self.logger.log("Attempt #{0} to unmount /oldroot".format(attempt))
            try:
                if attempt == 1:
                    self.enter_unmount_oldroot()
                elif attempt > 10:
                    raise Exception("Could not unmount /oldroot in 10 attempts")
                else:
                    self.retry_unmount_oldroot()
                self.log_machine_state()
            except Exception as e:
                message = "Attempt #{0} to unmount /oldroot failed with error: {1}, stack trace: {2}".format(attempt, e, traceback.format_exc())
                self.logger.log(msg=message)
                self.hutil.do_status_report(operation='EnableEncryptionOSVolume',
                                            status=CommonVariables.extension_error_status,
                                            status_code=str(CommonVariables.unmount_oldroot_error),
                                            message=message)
                sleep(10)
                if attempt > 10:
                    raise Exception(message)
            else:
                oldroot_unmounted_successfully = True
            finally:
                attempt += 1

        self.enter_encrypt_block_device()
        self.log_machine_state()
        self.enter_patch_boot_system()
        self.log_machine_state()
        self.stop_machine()
        self.log_machine_state()
        self._reboot()
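The class above drives the `transitions` library. As a minimal editorial sketch (illustrative only, reduced to two states), Machine attaches trigger methods to the model and runs on_enter callbacks by name:

from transitions import Machine, State


class Demo(object):
    def on_enter_state(self):
        print("entered state:", self.state)


demo = Demo()
machine = Machine(
    model=demo,
    states=[
        State(name="uninitialized"),
        State(name="completed", on_enter="on_enter_state"),
    ],
    transitions=[{"trigger": "finish", "source": "uninitialized", "dest": "completed"}],
    initial="uninitialized",
)
demo.finish()  # trigger method added to the model by Machine
assert demo.state == "completed"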
1,409
from future.moves.urllib.parse import urlencode

import github3
import cachecontrol
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError

from addons.github import settings as github_settings
from addons.github.exceptions import NotFoundError

# Initialize caches
https_cache = cachecontrol.CacheControlAdapter()
default_adapter = HTTPAdapter()


class GitHubClient(object):

    def __init__(self, external_account=None, access_token=None):
        self.access_token = getattr(external_account, 'oauth_key', None) or access_token
        if self.access_token:
            self.gh3 = github3.login(token=self.access_token)
            self.gh3.set_client_id(
                github_settings.CLIENT_ID, github_settings.CLIENT_SECRET
            )
        else:
            self.gh3 = github3.GitHub()

        # Caching library
        if github_settings.CACHE:
            self.gh3._session.mount('https://api.github.com/user', default_adapter)
            self.gh3._session.mount('https://', https_cache)

    def user(self, user=None):
        """Fetch a user or the authenticated user.

        :param user: Optional GitHub user name; will fetch authenticated
            user if omitted
        :return dict: GitHub API response
        """
        if user is None:
            return self.gh3.me()
        return self.gh3.user(user)

    def repo(self, user, repo):
        """Get a single Github repo's info.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return: Dict of repo information
            See http://developer.github.com/v3/repos/#get
        """
        try:
            rv = self.gh3.repository(user, repo)
        except ConnectionError:
            raise NotFoundError

        if rv:
            return rv
        raise NotFoundError

    def repos(self):
        repos = self.gh3.repositories(type='all', sort='pushed')
        return [repo for repo in repos if repo.permissions['push']]

    def create_repo(self, repo, **kwargs):
        return self.gh3.create_repository(repo, **kwargs)

    def branches(self, user, repo, branch=None):
        """List a repo's branches or get a single branch (in a list).

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :param str branch: Branch name if getting a single branch
        :return: List of branch dicts
            http://developer.github.com/v3/repos/#list-branches
        """
        if branch:
            return [self.repo(user, repo).branch(branch)]
        return self.repo(user, repo).branches() or []

    # TODO: Test
    def starball(self, user, repo, archive='tar', ref='master'):
        """Get link for archive download.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :param str archive: Archive format [tar|zip]
        :param str ref: Git reference
        :returns: tuple: Tuple of headers and file location
        """
        # github3 archive method writes file to disk
        repository = self.repo(user, repo)
        url = repository._build_url(archive + 'ball', ref, base_url=repository._api)
        resp = repository._get(url, allow_redirects=True, stream=True)

        return resp.headers, resp.content

    #########
    # Hooks #
    #########

    def hooks(self, user, repo):
        """List webhooks

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return list: List of commit dicts from GitHub; see
            http://developer.github.com/v3/repos/hooks/#json-http
        """
        return self.repo(user, repo).hooks()

    def add_hook(self, user, repo, name, config, events=None, active=True):
        """Create a webhook.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return dict: Hook info from GitHub; see
            http://developer.github.com/v3/repos/hooks/#json-http
        """
        try:
            hook = self.repo(user, repo).create_hook(name, config, events, active)
        except github3.GitHubError:
            # TODO Handle this case
            # if '20 hooks' in e.errors[0].get('message'):
            return None
        else:
            return hook

    def delete_hook(self, user, repo, _id):
        """Delete a webhook.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return bool: True if successful, False otherwise
        :raises: NotFoundError if repo or hook cannot be located
        """
        repo = self.repo(user, repo)
        hook = repo.hook(_id)
        if hook is None:
            raise NotFoundError
        return repo.hook(_id).delete()

    ########
    # Auth #
    ########

    def METHOD_NAME(self):
        if self.access_token:
            return self.gh3.revoke_authorization(self.access_token)

    def check_authorization(self):
        """Check an authorization created by a registered application.

        OAuth applications can use this method to check token validity
        without hitting normal rate limits because of failed login attempts.
        If the token is valid, it will return True, otherwise it will
        return False.

        :returns: True if token is valid, False otherwise
        :rtype: bool
        """
        if self.access_token:
            url = self.gh3._build_url('user')
            resp = self.gh3._get(
                url,
                headers={
                    'Authorization': f'token {self.access_token}',
                    'Accept': 'application/vnd.github.v3+json'
                },
            )
            if resp and resp.status_code == 200:
                return True
        return False


def ref_to_params(branch=None, sha=None):
    params = urlencode({
        key: value
        for key, value in {
            'branch': branch,
            'sha': sha,
        }.items()
        if value
    })
    if params:
        return '?' + params
    return ''
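Usage sketch (editor's addition; the token is a placeholder and real calls hit the GitHub API over the network):

client = GitHubClient(access_token="<personal-access-token>")  # placeholder token

if client.check_authorization():
    repo = client.repo("octocat", "Hello-World")
    branches = client.branches("octocat", "Hello-World")
    print(repo.name, len(branches))
    print(ref_to_params(branch="main"))  # "?branch=main"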
1,410
# coding=utf-8
# Copyright 2018-2023 EvaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest

import pandas as pd

from evadb.catalog.catalog_type import ColumnType
from evadb.expression.abstract_expression import ExpressionType
from evadb.expression.comparison_expression import ComparisonExpression
from evadb.expression.constant_value_expression import ConstantValueExpression
from evadb.models.storage.batch import Batch


class ComparisonExpressionsTest(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # create a dummy batch
        self.batch = Batch(pd.DataFrame([0]))

    def test_comparison_compare_equal(self):
        const_exp1 = ConstantValueExpression(1)
        const_exp2 = ConstantValueExpression(1)

        cmpr_exp = ComparisonExpression(
            ExpressionType.COMPARE_EQUAL, const_exp1, const_exp2
        )

        self.assertEqual([True], cmpr_exp.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp), None)

    def test_comparison_compare_greater(self):
        const_exp1 = ConstantValueExpression(1)
        const_exp2 = ConstantValueExpression(0)

        cmpr_exp = ComparisonExpression(
            ExpressionType.COMPARE_GREATER, const_exp1, const_exp2
        )

        self.assertEqual([True], cmpr_exp.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp), None)

    def test_comparison_compare_lesser(self):
        const_exp1 = ConstantValueExpression(0)
        const_exp2 = ConstantValueExpression(2)

        cmpr_exp = ComparisonExpression(
            ExpressionType.COMPARE_LESSER, const_exp1, const_exp2
        )

        self.assertEqual([True], cmpr_exp.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp), None)

    def test_comparison_compare_geq(self):
        const_exp1 = ConstantValueExpression(1)
        const_exp2 = ConstantValueExpression(1)
        const_exp3 = ConstantValueExpression(0)

        cmpr_exp1 = ComparisonExpression(
            ExpressionType.COMPARE_GEQ, const_exp1, const_exp2
        )
        cmpr_exp2 = ComparisonExpression(
            ExpressionType.COMPARE_GEQ, const_exp1, const_exp3
        )

        # checking equal
        self.assertEqual([True], cmpr_exp1.evaluate(self.batch).frames[0].tolist())
        # checking greater equal
        self.assertEqual([True], cmpr_exp2.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp1), None)

    def test_comparison_compare_leq(self):
        const_exp1 = ConstantValueExpression(0)
        const_exp2 = ConstantValueExpression(2)
        const_exp3 = ConstantValueExpression(2)

        cmpr_exp1 = ComparisonExpression(
            ExpressionType.COMPARE_LEQ, const_exp1, const_exp2
        )
        cmpr_exp2 = ComparisonExpression(
            ExpressionType.COMPARE_LEQ, const_exp2, const_exp3
        )

        # checking lesser
        self.assertEqual([True], cmpr_exp1.evaluate(self.batch).frames[0].tolist())
        # checking equal
        self.assertEqual([True], cmpr_exp2.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp1), None)

    def test_comparison_compare_neq(self):
        const_exp1 = ConstantValueExpression(0)
        const_exp2 = ConstantValueExpression(1)

        cmpr_exp = ComparisonExpression(
            ExpressionType.COMPARE_NEQ, const_exp1, const_exp2
        )

        self.assertEqual([True], cmpr_exp.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp), None)

    def METHOD_NAME(self):
        const_exp1 = ConstantValueExpression([1, 2], ColumnType.NDARRAY)
        const_exp2 = ConstantValueExpression([1, 5], ColumnType.NDARRAY)
        const_exp3 = ConstantValueExpression([1, 2, 3, 4], ColumnType.NDARRAY)

        cmpr_exp1 = ComparisonExpression(
            ExpressionType.COMPARE_CONTAINS, const_exp3, const_exp1
        )
        self.assertEqual([True], cmpr_exp1.evaluate(self.batch).frames[0].tolist())

        cmpr_exp2 = ComparisonExpression(
            ExpressionType.COMPARE_CONTAINS, const_exp3, const_exp2
        )
        self.assertEqual([False], cmpr_exp2.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp1), None)

    def test_comparison_compare_is_contained(self):
        const_exp1 = ConstantValueExpression([1, 2], ColumnType.NDARRAY)
        const_exp2 = ConstantValueExpression([1, 5], ColumnType.NDARRAY)
        const_exp3 = ConstantValueExpression([1, 2, 3, 4], ColumnType.NDARRAY)

        cmpr_exp1 = ComparisonExpression(
            ExpressionType.COMPARE_IS_CONTAINED, const_exp1, const_exp3
        )
        self.assertEqual([True], cmpr_exp1.evaluate(self.batch).frames[0].tolist())

        cmpr_exp2 = ComparisonExpression(
            ExpressionType.COMPARE_IS_CONTAINED, const_exp2, const_exp3
        )
        self.assertEqual([False], cmpr_exp2.evaluate(self.batch).frames[0].tolist())
        self.assertNotEqual(str(cmpr_exp1), None)
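The two array tests exercise EvaDB's CONTAINS / IS_CONTAINED semantics. Restated in plain Python as an editorial illustration (not EvaDB code):

def contains(container, items):
    # COMPARE_CONTAINS semantics: every element of `items` is in `container`.
    return all(item in container for item in items)


assert contains([1, 2, 3, 4], [1, 2]) is True    # mirrors cmpr_exp1 above
assert contains([1, 2, 3, 4], [1, 5]) is False   # mirrors cmpr_exp2 above
# COMPARE_IS_CONTAINED(a, b) is simply contains(b, a) with arguments swapped.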
1,411
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkmts.endpoint import endpoint_data


class AddSmarttagTemplateRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Mts', '2014-06-18', 'AddSmarttagTemplate', 'mts')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_KnowledgeConfig(self):  # String
        return self.get_query_params().get('KnowledgeConfig')

    def set_KnowledgeConfig(self, KnowledgeConfig):  # String
        self.add_query_param('KnowledgeConfig', KnowledgeConfig)

    def get_Industry(self):  # String
        return self.get_query_params().get('Industry')

    def set_Industry(self, Industry):  # String
        self.add_query_param('Industry', Industry)

    def get_LabelVersion(self):  # String
        return self.get_query_params().get('LabelVersion')

    def set_LabelVersion(self, LabelVersion):  # String
        self.add_query_param('LabelVersion', LabelVersion)

    def get_Scene(self):  # String
        return self.get_query_params().get('Scene')

    def set_Scene(self, Scene):  # String
        self.add_query_param('Scene', Scene)

    def get_FaceCustomParamsConfig(self):  # String
        return self.get_query_params().get('FaceCustomParamsConfig')

    def set_FaceCustomParamsConfig(self, FaceCustomParamsConfig):  # String
        self.add_query_param('FaceCustomParamsConfig', FaceCustomParamsConfig)

    def get_TemplateName(self):  # String
        return self.get_query_params().get('TemplateName')

    def set_TemplateName(self, TemplateName):  # String
        self.add_query_param('TemplateName', TemplateName)

    def get_IsDefault(self):  # Boolean
        return self.get_query_params().get('IsDefault')

    def set_IsDefault(self, IsDefault):  # Boolean
        self.add_query_param('IsDefault', IsDefault)

    def get_FaceCategoryIds(self):  # String
        return self.get_query_params().get('FaceCategoryIds')

    def set_FaceCategoryIds(self, FaceCategoryIds):  # String
        self.add_query_param('FaceCategoryIds', FaceCategoryIds)

    def get_KeywordConfig(self):  # String
        return self.get_query_params().get('KeywordConfig')

    def set_KeywordConfig(self, KeywordConfig):  # String
        self.add_query_param('KeywordConfig', KeywordConfig)

    def get_LandmarkGroupIds(self):  # String
        return self.get_query_params().get('LandmarkGroupIds')

    def set_LandmarkGroupIds(self, LandmarkGroupIds):  # String
        self.add_query_param('LandmarkGroupIds', LandmarkGroupIds)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_ObjectGroupIds(self):  # String
        return self.get_query_params().get('ObjectGroupIds')

    def set_ObjectGroupIds(self, ObjectGroupIds):  # String
        self.add_query_param('ObjectGroupIds', ObjectGroupIds)

    def METHOD_NAME(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_AnalyseTypes(self):  # String
        return self.get_query_params().get('AnalyseTypes')

    def set_AnalyseTypes(self, AnalyseTypes):  # String
        self.add_query_param('AnalyseTypes', AnalyseTypes)

    def get_LabelType(self):  # String
        return self.get_query_params().get('LabelType')

    def set_LabelType(self, LabelType):  # String
        self.add_query_param('LabelType', LabelType)
1,412
import sblc
import os

# set working dir to SBL's dataPath
# fix(later): should do this directly from C++ code
os.chdir( sblc.dataPath() )


# convert a python value to a string value suitable for SBL commands/configs
def strProc( val ):
    valStr = str( val )
    if valStr == "False":
        valStr = "0"
    if valStr == "True":
        valStr = "1"
    return valStr


# represents an entry in a configuration file
class ConfigEntry:

    # basic constructor
    def __init__( self, _name, _value, _comment ):
        self.name = _name
        self.value = _value
        self.comment = _comment


# represents a configuration file
class Config:

    # basic constructor
    def __init__( self ):
        # print( "adding entries" )
        # self._entries = []
        self.__dict__[ "_entries" ] = []

    # add a config entry
    def __setattr__( self, name, value ):
        if not name.startswith( "_" ):
            found = False
            for e in self._entries:  # fix(later): use dict (though want to maintain order)
                if e.name == name:
                    e.value = value
                    found = True
            if not found:
                self._entries.append( ConfigEntry( name, value, "" ) )
        # elif name == "_entries":
        #     print( "adding entries again" )
        #     self.__dict__[ "_entries" ] = []

    # read a config entry
    def __getattr__( self, name ):
        if not name.startswith( "_" ):
            for e in self._entries:  # fix(later): use dict (though want to maintain order)
                if e.name == name:
                    return e.value
        raise AttributeError

    # create a string version suitable for passing to an SBL command
    def __str__( self ):
        s = ""
        for e in self._entries:
            if e.name:
                s += e.name + "=" + strProc( e.value ) + " "
        return s

    # load a configuration file (in SBL format)
    def load( self, fileName ):
        f = open( fileName, "r" )
        if f:
            for line in f:
                line = line.strip()

                # get comments/meta-data
                preComment = line
                comment = ""
                if '[' in line:
                    split = line.split( '[', 1 )
                    preComment = split[ 0 ]
                    comment = "[" + split[ 1 ]
                elif '#' in line:
                    split = line.split( '#', 1 )
                    preComment = split[ 0 ]
                    comment = "#" + split[ 1 ]

                # get name and value (if any)
                name = ""
                value = ""
                split = preComment.split()
                if len( split ) >= 2:
                    name = split[ 0 ]
                    value = split[ 1 ]

                # append an entry (even for blank lines)
                self._entries.append( ConfigEntry( name, value, comment ) )

    # save this configuration file (in SBL format)
    def save( self, fileName ):
        f = open( fileName, "w" )
        if f:
            for e in self._entries:
                if e.name:
                    f.write( e.name )
                    f.write( " " )
                    f.write( strProc( e.value ) )
                    if e.comment:
                        f.write( " " )
                if e.comment:
                    f.write( e.comment )
                f.write( "\n" )


# provides a simple interface to SBL commands
class CommandRouter:

    # return true if user has requested that the current command stop running
    def checkCommandCancel( self ):
        return sblc.checkCommandEvents()

    # display a message
    def disp( self, indent, message ):
        sblc.disp( 0, indent, message )

    # display a warning
    def METHOD_NAME( self, message ):
        sblc.disp( 1, 0, message )

    # display a fatal error (will terminate program)
    def fatalError( self, message ):
        sblc.disp( 2, 0, message )

    # assume all other method calls are commands; send to SBL C++ command system
    def __getattr__( self, name ):
        if not name.startswith( "_" ):
            def runCommand( *args, **keywords ):
                cmdStr = name + " " + " ".join( [strProc( a ) for a in args] )
                sblc.execCommand( cmdStr )
            return runCommand
        else:
            raise AttributeError
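Usage sketch (editor's addition; "example.conf" is a placeholder path and the entry names are invented):

conf = Config()
conf.load( "example.conf" )   # entries are "name value [# comment]" lines
conf.frameRate = 30           # updates an existing entry or appends a new one
conf.verbose = True           # booleans are stored as "1"/"0" via strProc
conf.save( "example.conf" )
print( str( conf ) )          # e.g. "frameRate=30 verbose=1 " for SBL commands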
1,413
""" SoftLayer.tests.CLI.modules.security_tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :license: MIT, see LICENSE for more details. """ import json import os.path import sys import tempfile from unittest import mock as mock from SoftLayer.CLI import exceptions from SoftLayer import testing class SecurityTests(testing.TestCase): def test_add_sshkey_without_key_errors(self): result = self.run_command(['security', 'sshkey-add', 'key1']) self.assertEqual(result.exit_code, 2) self.assertIsInstance(result.exception, exceptions.ArgumentError) def test_add_sshkey_with_key_file_and_key_argument_errors(self): path = os.path.join(testing.FIXTURE_PATH, 'id_rsa.pub') result = self.run_command(['security', 'sshkey-add', 'key1', '--key=some_key', '--in-file=%s' % path]) self.assertEqual(result.exit_code, 2) self.assertIsInstance(result.exception, exceptions.ArgumentError) def test_add_sshkey_by_option(self): service = self.client['Security_Ssh_Key'] mock_key = service.getObject()['key'] result = self.run_command(['security', 'sshkey-add', 'key1', '--key=%s' % mock_key, '--note=my key']) self.assert_no_fail(result) self.assertEqual(json.loads(result.output), "SSH key added: aa:bb:cc:dd") self.assert_called_with('SoftLayer_Security_Ssh_Key', 'createObject', args=({'notes': 'my key', 'key': mock_key, 'label': 'key1'},)) def test_add_sshkey_by_file(self): path = os.path.join(testing.FIXTURE_PATH, 'id_rsa.pub') result = self.run_command(['security', 'sshkey-add', 'key1', '--in-file=%s' % path]) self.assert_no_fail(result) self.assertEqual(json.loads(result.output), "SSH key added: aa:bb:cc:dd") service = self.client['Security_Ssh_Key'] mock_key = service.getObject()['key'] self.assert_called_with('SoftLayer_Security_Ssh_Key', 'createObject', args=({'notes': None, 'key': mock_key, 'label': 'key1'},)) def test_remove_sshkey_key(self): result = self.run_command(['--really', 'security', 'sshkey-remove', '1234']) self.assert_no_fail(result) self.assert_called_with('SoftLayer_Security_Ssh_Key', 'deleteObject', identifier=1234) @mock.patch('SoftLayer.CLI.formatting.no_going_back') def test_remove_sshkey_fail(self, ngb_mock): ngb_mock.return_value = False result = self.run_command(['security', 'sshkey-remove', '1234']) self.assertEqual(result.exit_code, 2) def test_edit_sshkey(self): result = self.run_command(['security', 'sshkey-edit', '1234', '--label=key1', '--note=my key']) self.assert_no_fail(result) self.assert_called_with('SoftLayer_Security_Ssh_Key', 'editObject', args=({'notes': 'my key', 'label': 'key1'},), identifier=1234) def METHOD_NAME(self): fixture = self.set_mock('SoftLayer_Security_Ssh_Key', 'editObject') fixture.return_value = False result = self.run_command(['security', 'sshkey-edit', '1234', '--label=key1', '--note=my key']) self.assertEqual(result.exit_code, 2) def test_list_sshkeys(self): result = self.run_command(['security', 'sshkey-list']) self.assert_no_fail(result) self.assertEqual(json.loads(result.output), [{'notes': '-', 'fingerprint': None, 'id': '100', 'label': 'Test 1'}, {'notes': 'my key', 'fingerprint': None, 'id': '101', 'label': 'Test 2'}]) def test_print_sshkey(self): result = self.run_command(['security', 'sshkey-print', '1234']) self.assert_no_fail(result) self.assertEqual(json.loads(result.output), {'id': 1234, 'label': 'label', 'notes': 'notes'}) def test_print_sshkey_file(self): if sys.platform.startswith("win"): self.skipTest("Test doesn't work in Windows") with tempfile.NamedTemporaryFile() as sshkey_file: service = self.client['Security_Ssh_Key'] mock_key = 
service.getObject()['key'] result = self.run_command(['security', 'sshkey-print', '1234', '--out-file=%s' % sshkey_file.name]) self.assert_no_fail(result) self.assertEqual(mock_key, sshkey_file.read().decode("utf-8")) def test_list_certficates(self): result = self.run_command(['security', 'cert-list', '--status', 'all']) self.assert_no_fail(result) self.assertEqual(json.loads(result.output), [ { "id": 1234, "common_name": "cert", "days_until_expire": 0, "notes": None } ]) @mock.patch('SoftLayer.CLI.formatting.no_going_back') def test_remove_certficate(self, confirm_mock): confirm_mock.return_value = True result = self.run_command(['security', 'cert-remove', '123456']) self.assert_no_fail(result) self.assertEqual(result.exit_code, 0) def test_download_certficate(self): result = self.run_command(['security', 'cert-download', '123456']) self.assert_no_fail(result) self.assertEqual(result.exit_code, 0)
1,414
"""Tests related to content upload.""" import hashlib import uuid import pytest import os from random import shuffle from pulpcore.client.pulpcore import ApiException @pytest.fixture def pulpcore_random_chunked_file_factory(tmp_path): """Returns a function to create random chunks to be uploaded.""" def _create_chunks(number_chunks=2, chunk_sizes=None): # Default to 512 byte chunk sizes if chunk_sizes: if len(chunk_sizes) != number_chunks: raise Exception("number_chunks != len(chunk_sizes)") else: chunk_sizes = [512] * number_chunks chunks = {"size": sum(chunk_sizes), "chunks": []} hasher = hashlib.new("sha256") start = 0 for chunk_size in chunk_sizes: name = tmp_path / str(uuid.uuid4()) with open(name, "wb") as f: content = os.urandom(chunk_size) hasher.update(content) f.write(content) f.flush() content_sha = hashlib.sha256(content).hexdigest() end = start + chunk_size - 1 chunks["chunks"].append((name, f"bytes {start}-{end}/{chunks['size']}", content_sha)) start = start + chunk_size chunks["digest"] = hasher.hexdigest() return chunks return _create_chunks @pytest.fixture def pulpcore_upload_chunks( uploads_api_client, artifacts_api_client, gen_object_with_cleanup, tasks_api_client, monitor_task, ): """Upload file in chunks.""" artifacts = [] def _upload_chunks(size, chunks, sha256, include_chunk_sha256=False): """ Chunks is a list of tuples in the form of (chunk_filename, "bytes-ranges", optional_sha256). """ upload = gen_object_with_cleanup(uploads_api_client, {"size": size}) for data in chunks: kwargs = {"file": data[0], "content_range": data[1], "upload_href": upload.pulp_href} if include_chunk_sha256: if len(data) != 3: raise Exception(f"Chunk didn't include its sha256: {data}") kwargs["sha256"] = data[2] uploads_api_client.update(**kwargs) finish_task = uploads_api_client.commit(upload.pulp_href, {"sha256": sha256}).task response = monitor_task(finish_task) artifact_href = response.created_resources[0] artifact = artifacts_api_client.read(artifact_href) artifacts.append(artifact_href) return upload, artifact yield _upload_chunks for href in artifacts: try: artifacts_api_client.delete(href) except ApiException: pass @pytest.mark.parallel def test_create_artifact_without_checksum( pulpcore_upload_chunks, pulpcore_random_chunked_file_factory ): """Test creation of artifact using upload of files in chunks.""" file_chunks_data = pulpcore_random_chunked_file_factory() size = file_chunks_data["size"] chunks = file_chunks_data["chunks"] shuffle(chunks) sha256 = file_chunks_data["digest"] _, artifact = pulpcore_upload_chunks(size, chunks, sha256) assert artifact.sha256 == sha256 @pytest.mark.parallel def test_create_artifact_passing_checksum( pulpcore_upload_chunks, pulpcore_random_chunked_file_factory ): """Test creation of artifact using upload of files in chunks passing checksum.""" file_chunks_data = pulpcore_random_chunked_file_factory(number_chunks=5) size = file_chunks_data["size"] chunks = file_chunks_data["chunks"] shuffle(chunks) sha256 = file_chunks_data["digest"] _, artifact = pulpcore_upload_chunks(size, chunks, sha256, include_chunk_sha256=True) assert artifact.sha256 == sha256 @pytest.mark.parallel def test_upload_chunk_wrong_checksum( uploads_api_client, pulpcore_random_chunked_file_factory, gen_object_with_cleanup ): """Test creation of artifact using upload of files in chunks passing wrong checksum.""" file_chunks_data = pulpcore_random_chunked_file_factory() size = file_chunks_data["size"] chunks = file_chunks_data["chunks"] upload = 
gen_object_with_cleanup(uploads_api_client, {"size": size}) for data in chunks: kwargs = {"file": data[0], "content_range": data[1], "upload_href": upload.pulp_href} kwargs["sha256"] = "WRONG CHECKSUM" with pytest.raises(ApiException) as e: uploads_api_client.update(**kwargs) assert e.value.status == 400 @pytest.mark.parallel def METHOD_NAME( uploads_api_client, pulpcore_random_chunked_file_factory, gen_object_with_cleanup ): """Test upload responses when creating an upload and uploading chunks.""" file_chunks_data = pulpcore_random_chunked_file_factory(chunk_sizes=[6291456, 4194304]) upload = gen_object_with_cleanup(uploads_api_client, {"size": file_chunks_data["size"]}) expected_keys = ["pulp_href", "pulp_created", "size"] for key in expected_keys: assert getattr(upload, key) for data in file_chunks_data["chunks"]: kwargs = {"file": data[0], "content_range": data[1], "upload_href": upload.pulp_href} response = uploads_api_client.update(**kwargs) for key in expected_keys: assert getattr(response, key) upload = uploads_api_client.read(upload.pulp_href) expected_keys.append("chunks") for key in expected_keys: assert getattr(upload, key) expected_chunks = [ {"offset": 0, "size": 6291456}, {"offset": 6291456, "size": 4194304}, ] sorted_chunks_response = sorted([c.to_dict() for c in upload.chunks], key=lambda i: i["offset"]) assert sorted_chunks_response == expected_chunks @pytest.mark.parallel def test_delete_upload( uploads_api_client, pulpcore_upload_chunks, pulpcore_random_chunked_file_factory ): """Check whether uploads are being correctly deleted after committing.""" file_chunks_data = pulpcore_random_chunked_file_factory() size = file_chunks_data["size"] chunks = file_chunks_data["chunks"] shuffle(chunks) sha256 = file_chunks_data["digest"] upload, _ = pulpcore_upload_chunks(size, chunks, sha256) with pytest.raises(ApiException) as e: uploads_api_client.read(upload.pulp_href) assert e.value.status == 404
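The Content-Range strings built by the factory fixture follow the standard "bytes start-end/total" form. Restated as a tiny helper (editor's addition, illustrative only):

def content_ranges(chunk_sizes):
    # Mirrors the range bookkeeping in pulpcore_random_chunked_file_factory.
    total = sum(chunk_sizes)
    ranges, start = [], 0
    for size in chunk_sizes:
        end = start + size - 1
        ranges.append(f"bytes {start}-{end}/{total}")
        start += size
    return ranges


assert content_ranges([6291456, 4194304]) == [
    "bytes 0-6291455/10485760",
    "bytes 6291456-10485759/10485760",
]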
# -*- coding: utf-8 -*-
from app import LOGGER
import json


def _get_answer_value(answer, question, question_translation):
    if answer is None:
        if question_translation.language == 'fr':
            # TODO: Add proper language support for back-end text
            return 'Aucune réponse fournie'
        else:
            return 'No answer provided'

    if question.type == 'multi-choice' and question_translation.options is not None:
        value = [o for o in question_translation.options if o['value'] == answer.value]
        if not value:
            return answer.value
        return value[0]['label']

    if question.type == 'file' and answer.value:
        if question_translation.language == 'fr':
            return 'Fichier téléchargé'
        else:
            return 'Uploaded File'

    if question.type == 'multi-file' and answer.value:
        file_info = json.loads(answer.value)
        return "\n".join([f['name'] for f in file_info])

    if question.type == 'information':
        return ""

    return answer.value


def _find_answer(question, answers):
    if question is None:
        return None
    answer = [a for a in answers if a.question_id == question.id]
    if answer:
        return answer[0]
    else:
        return None


def _find_question(question_id, questions):
    question = [q for q in questions if q.id == question_id]
    if question:
        return question[0]
    else:
        return None


def build_response_email_body(answers, language, application_form):
    # Stringify the summary dictionary, with line breaks between question/answer pairs
    stringified_summary = ""
    allQuestions = [q for section in application_form.sections for q in section.questions]
    for section in application_form.sections:
        if not section.questions:
            continue
        section_translation = section.get_translation(language)
        if section_translation is None:
            LOGGER.error('Missing {} translation for section {}.'.format(language, section.id))
            section_translation = section.get_translation('en')
        stringified_summary += section_translation.name + '\n' + '-' * 20 + '\n\n'
        for question in section.questions:
            question_translation = question.get_translation(language)
            if question_translation is None:
                LOGGER.error('Missing {} translation for question {}.'.format(language, question.id))
                question_translation = question.get_translation('en')
            if question.depends_on_question_id and question_translation.show_for_values:
                dependency_question = _find_question(question.depends_on_question_id, allQuestions)
                dependency_answer = _find_answer(dependency_question, answers)
                if dependency_answer and dependency_answer.value not in question_translation.show_for_values:
                    continue
            answer = _find_answer(question, answers)
            if answer:
                answer_value = _get_answer_value(answer, answer.question, question_translation)
                stringified_summary += '{question}\n{answer}\n\n'.format(
                    question=question_translation.headline, answer=answer_value)
    return stringified_summary


def build_response_html_app_info(response, language):
    """
    Stringify the application information for output in an HTML file, with the
    response id and applicant name/contact info as paragraphs (<p>).
    """
    stringified_app_info = (
        f"<p><b>Response ID:</b> {response.id}</p> "
        f"<p><b>Full name:</b> {response.user.firstname} {response.user.lastname}</p>"
    )
    return "<title>Application Responses</title>" + stringified_app_info


def build_response_html_answers(answers, language, application_form):
    """
    Stringify the answers for output in an HTML file, with sections as headers (<h1>),
    questions as second-level headings (<h2>) and answers as paragraphs (<p>).
    """
    stringified_answers = ""
    for section in application_form.sections:
        if not section.questions:
            continue
        section_translation = section.get_translation(language)
        if section_translation is None:
            LOGGER.error('Missing {} translation for section {}.'.format(language, section.id))
            section_translation = section.get_translation('en')
        stringified_answers += '<h1>' + section_translation.name + '</h1>'
        for question in section.questions:
            question_translation = question.get_translation(language)
            if question_translation is None:
                LOGGER.error('Missing {} translation for question {}.'.format(language, question.id))
                question_translation = question.get_translation('en')
            answer = _find_answer(question, answers)
            if answer:
                answer_value = _get_answer_value(answer, answer.question, question_translation)
                stringified_answers += f"<h2>{question_translation.headline}</h2> <p>{answer_value}</p>"
    return stringified_answers
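For illustration, here is how _get_answer_value resolves a multi-choice answer. The SimpleNamespace stubs below are stand-ins for the real question/translation/answer models, not the actual classes:

from types import SimpleNamespace

question = SimpleNamespace(type='multi-choice')
translation = SimpleNamespace(
    language='en',
    options=[{'value': 'opt_a', 'label': 'Option A'}],
)

# The stored raw value is resolved to the translated label...
assert _get_answer_value(SimpleNamespace(value='opt_a'), question, translation) == 'Option A'
# ...and falls back to the raw value when no option matches.
assert _get_answer_value(SimpleNamespace(value='other'), question, translation) == 'other'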
"""Use the Python pygments library to perform extra checks on C++ grammar.""" from pygments import token from pygments.lexers.compiled import CppLexer import os def check_header_file(fh_name, project_name, errors): """Check a single C++ header file""" _check_file(fh_name, project_name, True, errors) def check_cpp_file(fh_name, project_name, errors): """Check a single C++ source file""" _check_file(fh_name, project_name, False, errors) def _check_file(fh_name, project_name, header, errors): fh, filename = fh_name s = METHOD_NAME(fh) check_tokens(s, filename, project_name, header, errors) def METHOD_NAME(fh): """Use the Python pygments library to tokenize a C++ file""" code = fh.read() c = CppLexer() scan = [] for (index, tok, value) in c.get_tokens_unprocessed(code): scan.append((tok, value)) return scan def check_tokens(scan, filename, project_name, header, errors): if filename.find("test_") == -1: # we don't do it for python tests check_comment_header(scan, filename, errors) if header: # Handle older versions of pygments which concatenate \n and # tokens if len(scan) >= 3 and scan[2][0] == token.Comment.Preproc \ and scan[2][1] == '\n#': scan[2] = (token.Comment.Preproc, '#') scan.insert(2, (token.Comment.Text, '\n')) check_header_start_end(scan, filename, project_name, errors) def check_comment_header(scan, filename, errors): if len(scan) < 1 or scan[0][0] not in (token.Comment, token.Comment.Multiline): errors.append('%s:1: First line should be a comment with a copyright ' 'notice and a description of the file' % filename) def have_header_guard(scan): return len(scan) >= 11 \ and scan[4][0] == token.Comment.Preproc \ and scan[4][1].startswith('ifndef') \ and scan[7][0] == token.Comment.Preproc \ and scan[7][1].startswith('define') \ and scan[-3][0] == token.Comment.Preproc \ and scan[-3][1].startswith('endif') \ and scan[-2][0] in (token.Comment, token.Comment.Multiline) def get_header_guard(filename, project_name): """Get prefix and suffix for header guard""" guard_prefix = project_name.replace(".", "").upper() guard_suffix = os.path.split(filename)[1].replace(".", "_").upper() return guard_prefix, guard_suffix def check_header_start_end(scan, filename, project_name, errors): guard_prefix, guard_suffix = get_header_guard(filename, project_name) header_guard = guard_prefix + '_' + guard_suffix if len(scan) < 11: bad = True else: bad = False if not scan[4][0] == token.Comment.Preproc: bad = True if not scan[4][1].startswith('ifndef'): errors.append('%s:%d: Header guard missing #ifndef.' % (filename, 1)) bad = True if not scan[7][0] == token.Comment.Preproc: bad = True if not scan[7][1].startswith('define'): errors.append('%s:%d: Header guard missing #define.' % (filename, 1)) bad = True if not scan[-3][0] == token.Comment.Preproc \ and not scan[-4][0] == token.Comment.Preproc: bad = True if not scan[-3][1].startswith('endif') \ and not scan[-4][1].startswith('endif'): errors.append('%s:%d: Header guard missing #endif.' % (filename, 1)) bad = True if not scan[-2][0] in (token.Comment, token.Comment.Multiline) \ and not scan[-3][0] in (token.Comment, token.Comment.Multiline): errors.append('%s:%d: Header guard missing closing comment.' % (filename, 1)) bad = True guard = scan[4][1][7:] if not guard.startswith(guard_prefix): errors.append('%s:%d: Header guard does not start with "%s".' % (filename, 1, guard_prefix)) bad = True if not guard.replace("_", "").endswith(guard_suffix.replace("_", "")): errors.append('%s:%d: Header guard does not end with "%s".' 
% (filename, 1, guard_suffix)) bad = True if not scan[7][1] == 'define ' + guard: errors.append('%s:%d: Header guard does not define "%s".' % (filename, 1, guard)) bad = True if not scan[-2][1] == '/* %s */' % guard \ and not scan[-3][1] == '/* %s */' % guard: errors.append('%s:%d: Header guard close does not have a ' 'comment of "/* %s */".' % (filename, 1, guard)) bad = True if bad: errors.append('%s:%d: Missing or incomplete header guard.' % (filename, 1) + """ Header files should start with a comment, then a blank line, then the rest of the file wrapped with a header guard. This must start with %s and end with %s - in between can be placed extra qualifiers, e.g. for a namespace. For example, /** Copyright and file description */ #ifndef %s #define %s ... #endif /* %s */ """ % (guard_prefix, guard_suffix, header_guard, header_guard, header_guard))
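A quick way to exercise the checker is to feed it an in-memory file; the expected guard layout is spelled out in the error text above. A minimal sketch (the project name "MyProject" is a placeholder):

import io

errors = []
# A header with no include guard at all: too few tokens, so the checker
# reports a missing/incomplete header guard.
check_header_file((io.StringIO("// just a comment\n"), "Foo.h"), "MyProject", errors)
assert any("header guard" in e.lower() for e in errors)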
import numbers
from decimal import Decimal
from random import randint
from typing import Any, Dict, Optional

from pydantic import Field, SecretStr

import hummingbot.connector.exchange.vertex.vertex_constants as CONSTANTS
from hummingbot.client.config.config_data_types import BaseConnectorConfigMap, ClientFieldData
from hummingbot.core.data_type.trade_fee import TradeFeeSchema

CENTRALIZED = True
USE_ETHEREUM_WALLET = False
EXAMPLE_PAIR = "WBTC-USDC"
DEFAULT_FEES = TradeFeeSchema(
    maker_percent_fee_decimal=Decimal("0.0"),
    taker_percent_fee_decimal=Decimal("0.0002"),
)


def hex_to_bytes32(hex_string: str) -> bytes:
    if hex_string.startswith("0x"):
        hex_string = hex_string[2:]
    data_bytes = bytes.fromhex(hex_string)
    padded_data = data_bytes + b"\x00" * (32 - len(data_bytes))
    return padded_data


def convert_timestamp(timestamp: Any) -> float:
    return float(timestamp) / 1e9


def trading_pair_to_product_id(trading_pair: str, exchange_market_info: Dict, is_perp: Optional[bool] = False) -> int:
    tp = trading_pair.replace("-", "/")
    for product_id in exchange_market_info:
        if is_perp and "perp" not in exchange_market_info[product_id]["symbol"].lower():
            continue
        if exchange_market_info[product_id]["market"] == tp:
            return product_id
    return -1


def market_to_trading_pair(market: str) -> str:
    """Converts a market symbol from Vertex to a trading pair."""
    return market.replace("/", "-")


def convert_from_x18(data: Any, precision: Optional[Decimal] = None) -> Any:
    """
    Converts numerical data encoded as x18 to a string representation of a floating
    point number; recursively applies the conversion for other data types.
    """
    if data is None:
        return None
    # Check if data type is str or float
    if isinstance(data, str) or isinstance(data, numbers.Number):
        data = Decimal(data) / Decimal("1000000000000000000")  # type: ignore
        if precision:
            data = data.quantize(precision)
        return str(data)
    if isinstance(data, dict):
        for k, v in data.items():
            data[k] = convert_from_x18(v, precision)
    elif isinstance(data, list):
        for i in range(0, len(data)):
            data[i] = convert_from_x18(data[i], precision)
    else:
        raise TypeError("Data is of unsupported type for convert_from_x18 to process", data)
    return data


def convert_to_x18(data: Any, precision: Optional[Decimal] = None) -> Any:
    """
    Converts numerical data to a string representation of its x18 encoding;
    recursively applies the conversion for other data types.
    """
    if data is None:
        return None
    # Check if data type is str or float
    if isinstance(data, str) or isinstance(data, numbers.Number):
        data = Decimal(str(data))  # type: ignore
        if precision:
            data = data.quantize(precision)
        return str((data * Decimal("1000000000000000000")).quantize(Decimal("1")))
    if isinstance(data, dict):
        for k, v in data.items():
            data[k] = convert_to_x18(v, precision)
    elif isinstance(data, list):
        for i in range(0, len(data)):
            data[i] = convert_to_x18(data[i], precision)
    else:
        raise TypeError("Data is of unsupported type for convert_to_x18 to process", data)
    return data


def generate_expiration(timestamp: Optional[float] = None, order_type: Optional[str] = None) -> str:
    default_max_time = 8640000000000000  # NOTE: Forever
    default_day_time = 86400
    # Default significant bit is 0 for GTC
    # https://vertex-protocol.gitbook.io/docs/developer-resources/api/websocket-rest-api/executes/place-order
    sig_bit = 0
    if order_type == CONSTANTS.TIME_IN_FORCE_IOC:
        sig_bit = 1
    elif order_type == CONSTANTS.TIME_IN_FORCE_FOK:
        sig_bit = 2
    elif order_type == CONSTANTS.TIME_IN_FORCE_POSTONLY:
        sig_bit = 3
    # NOTE: We can set up a max-time expiration
    expiration = str(default_max_time | (sig_bit << 62))
    if timestamp:
        unix_epoch = int(timestamp)
        expiration = str((unix_epoch + default_day_time) | (sig_bit << 62))
    return expiration


def generate_nonce(timestamp: float, expiry_ms: int = 90) -> int:
    unix_epoch_ms = int((timestamp * 1000) + (expiry_ms * 1000))
    nonce = (unix_epoch_ms << 20) + randint(1, 1001)
    return nonce


def convert_address_to_sender(address: str) -> str:
    # NOTE: the sender address includes the subaccount, which is "default" by default;
    # you cannot interact with subaccounts outside of default on the UI currently.
    # https://vertex-protocol.gitbook.io/docs/developer-resources/api/websocket-rest-api/executes#signing
    if isinstance(address, str):
        default_12bytes = "64656661756c740000000000"
        return address + default_12bytes
    raise TypeError("Address must be of type string")


def is_exchange_information_valid(exchange_info: Dict[str, Any]) -> bool:
    """
    Defaults to true; there isn't anything to check against.
    """
    return True


class VertexConfigMap(BaseConnectorConfigMap):
    connector: str = Field(default="vertex", const=True, client_data=None)
    vertex_arbitrum_private_key: SecretStr = Field(
        default=...,
        client_data=ClientFieldData(
            prompt=lambda cm: "Enter your Arbitrum private key",
            is_secure=True,
            is_connect_key=True,
            prompt_on_new=True,
        ),
    )
    vertex_arbitrum_address: str = Field(
        default=...,
        client_data=ClientFieldData(
            prompt=lambda cm: "Enter your Arbitrum wallet address",
            is_secure=False,
            is_connect_key=True,
            prompt_on_new=True,
        ),
    )
    # NOTE: Vertex allows for spot leverage
    # vertex_spot_leverage: bool = Field(
    #     default=False,
    #     client_data=ClientFieldData(
    #         prompt=lambda cm: "Enable spot leverage? This auto-borrows assets against your margin "
    #                           "to trade with larger size. Set to True to enable borrowing (default: False).",
    #         is_secure=False,
    #         is_connect_key=False,
    #         prompt_on_new=True,
    #     ),
    # )

    class Config:
        title = "vertex"


KEYS = VertexConfigMap.construct()


class VertexTestnetConfigMap(BaseConnectorConfigMap):
    connector: str = Field(default="vertex_testnet", client_data=None)
    vertex_testnet_arbitrum_private_key: SecretStr = Field(
        default=...,
        client_data=ClientFieldData(
            prompt=lambda cm: "Enter your Arbitrum TESTNET private key",
            is_secure=True,
            is_connect_key=True,
            prompt_on_new=True,
        ),
    )
    vertex_testnet_arbitrum_address: str = Field(
        default=...,
        client_data=ClientFieldData(
            prompt=lambda cm: "Enter your Arbitrum TESTNET wallet address",
            is_secure=False,
            is_connect_key=True,
            prompt_on_new=True,
        ),
    )
    # vertex_testnet_spot_leverage: bool = Field(
    #     default=False,
    #     client_data=ClientFieldData(
    #         prompt=lambda cm: "Enable spot leverage? This auto-borrows assets against your margin "
    #                           "to trade with larger size. Set to True to enable borrowing (default: False).",
    #         is_secure=False,
    #         is_connect_key=False,
    #         prompt_on_new=True,
    #     ),
    # )

    class Config:
        title = "vertex_testnet"


OTHER_DOMAINS = ["vertex_testnet"]
OTHER_DOMAINS_PARAMETER = {"vertex_testnet": "vertex_testnet"}
OTHER_DOMAINS_EXAMPLE_PAIR = {"vertex_testnet": "WBTC-USDC"}
OTHER_DOMAINS_DEFAULT_FEES = {"vertex_testnet": DEFAULT_FEES}
OTHER_DOMAINS_KEYS = {"vertex_testnet": VertexTestnetConfigMap.construct()}
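Two of the conventions above are worth a worked example: the x18 encoding is plain fixed-point with 18 decimal places, and the time-in-force flag is packed into the two high bits of the 64-bit expiration. A standalone sketch using only Decimal and integer math (mirroring convert_to_x18, convert_from_x18 and generate_expiration without importing them):

from decimal import Decimal

X18 = Decimal("1000000000000000000")  # 10**18

# Encode: 1.5 -> "1500000000000000000" (an integer string, as the API expects).
encoded = str((Decimal("1.5") * X18).quantize(Decimal("1")))
assert encoded == "1500000000000000000"

# Decode: back to a decimal string, optionally quantized to a precision.
decoded = Decimal(encoded) / X18
assert str(decoded.quantize(Decimal("0.01"))) == "1.50"

# Time-in-force lives in the top bits of the expiration integer:
default_max_time = 8640000000000000
sig_bit = 1  # IOC, per the mapping above
expiration = default_max_time | (sig_bit << 62)
assert expiration >> 62 == 1  # the flag is recoverable from the high bits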
#!/usr/bin/env python3
#
# Dump type header for private types in c modules.
#
# Author: Malte Mues <[email protected]>
import os
import re
import shutil
import subprocess
import sys
import textwrap
from tempfile import TemporaryDirectory

DEFINE_REGEX_HEADER = re.compile(r"\s*#\s*define\s*([\w]+)")


def collect_defines(c_file):
    """Collects all defines in a c module.

    This function should collect all defines inside a c module. Then the
    script will add these defines to the type header file. This allows the
    harness to use the defines as well. Because preprocessing eliminates
    them before compiling the goto binary, it is not possible to extract
    them from the goto binary.

    We assume that a define is either a single define, for example

        #define my_macro 0

    or optionally guarded by an ifdef, ifndef or if. For example:

        #ifdef another_macro
        #define my_macro
        #endif

    Any opening #if* is expected to be closed by an #endif. Further, it is
    expected that #if and #endif pairs are not nested. The third group of
    defines that this script tries to catch are multiline macros:

        #define aMacro( X ) \
            if( ( X )->A == 1 ) \
            { \
                ( X )->B = 2; \
            }

    The assumption is that '\\' is the last character in the line.
    """
    collector_result = ""
    with open(c_file, "r") as in_file:
        continue_define = False
        in_potential_def_scope = ""
        potential_define = False
        potential_define_confirmed = False
        for line in in_file:
            matched = DEFINE_REGEX_HEADER.match(line)
            if line.strip().startswith("#if"):
                potential_define = True
                in_potential_def_scope += line
            elif line.strip().startswith("#endif") and potential_define:
                if potential_define_confirmed:
                    in_potential_def_scope += line
                    collector_result += in_potential_def_scope
                in_potential_def_scope = ""
                potential_define = False
                potential_define_confirmed = False
            elif matched and potential_define:
                potential_define_confirmed = True
                in_potential_def_scope += line
            elif matched or continue_define:
                continue_define = line.strip("\n").endswith("\\")
                collector_result += line
            elif potential_define:
                in_potential_def_scope += line
    return collector_result


def get_module_name(c_file):
    base = os.path.basename(c_file)
    return base.split(".")[0]


def make_header_file(goto_binary, c_file, header_out=None):
    with TemporaryDirectory() as tmpdir:
        module = get_module_name(c_file)
        header_file = module + "_datastructure.h"
        drop_header_cmd = ["goto-instrument",
                           "--dump-c-type-header", module,
                           goto_binary,
                           header_file]
        subprocess.run(drop_header_cmd,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       check=False,
                       universal_newlines=True,
                       cwd=tmpdir,
                       shell=False)
        header_file = os.path.normpath(os.path.join(tmpdir, header_file))
        with open(header_file, "a") as out:
            print(collect_defines(c_file), file=out)
        if header_out:
            absolut_header_target = os.path.normpath(os.path.abspath(header_out))
            shutil.move(header_file, absolut_header_target)
        else:
            with open(header_file, "r") as header:
                print("".join(header.readlines()))


def print_usage_and_exit():
    print(textwrap.dedent("""\
        This script extracts a type header for local types in a c file.
        It expects a goto binary compiled from the c file along with the
        original c file.

            extract_type_header.py my_goto_binary the_c_file [header_output]

        The header_output is an optional parameter specifying a target
        output file. Otherwise, the script is going to print the header
        to stdout.
        """))
    sys.exit(1)


if __name__ == '__main__':
    TARGET = None
    if len(sys.argv) < 3 or len(sys.argv) > 4:
        print_usage_and_exit()
    BINARY = sys.argv[1]
    if not os.path.isabs(BINARY):
        BINARY = os.path.normpath(os.path.join(os.getcwd(), BINARY))
    FILE = sys.argv[2]
    if not os.path.isabs(FILE):
        FILE = os.path.normpath(os.path.join(os.getcwd(), FILE))
    if len(sys.argv) == 4:
        TARGET = sys.argv[3]
        if not os.path.isabs(TARGET):
            TARGET = os.path.normpath(os.path.join(os.getcwd(), TARGET))
    make_header_file(BINARY, FILE, TARGET)
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
#   Copyright 2021-2022 Valory AG
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
# ------------------------------------------------------------------------------
"""Tests for valory/abci connection, tendermint_decoder module."""

# pylint: skip-file

from packages.valory.connections.abci.connection import PUBLIC_ID
from packages.valory.connections.abci.dialogues import AbciDialogue, AbciDialogues
from packages.valory.connections.abci.tendermint.abci.types_pb2 import (  # type: ignore
    Request,
    RequestApplySnapshotChunk,
    RequestEcho,
    RequestListSnapshots,
    RequestLoadSnapshotChunk,
    RequestOfferSnapshot,
    RequestSetOption,
    Snapshot,
)
from packages.valory.connections.abci.tendermint_decoder import (
    _TendermintProtocolDecoder,
)
from packages.valory.protocols.abci import AbciMessage


class TestTendermintProtocolDecoder:
    """Test for the Tendermint protocol decoder."""

    def test_request_echo(self) -> None:
        """Test decoding of a request echo."""
        dialogues = AbciDialogues(connection_id=PUBLIC_ID)
        request = Request()
        echo = RequestEcho()
        echo.message = ""
        request.echo.CopyFrom(echo)
        message, dialogue = _TendermintProtocolDecoder.request_echo(
            request, dialogues, "counterparty"
        )
        assert isinstance(message, AbciMessage)
        assert isinstance(dialogue, AbciDialogue)

    def test_request_set_option(self) -> None:
        """Test decoding of a request set-option."""
        dialogues = AbciDialogues(connection_id=PUBLIC_ID)
        request = Request()
        set_option = RequestSetOption()
        set_option.key = ""
        set_option.value = ""
        request.set_option.CopyFrom(set_option)
        message, dialogue = _TendermintProtocolDecoder.request_set_option(
            request, dialogues, "counterparty"
        )
        assert isinstance(message, AbciMessage)
        assert isinstance(dialogue, AbciDialogue)

    def test_request_list_snapshots(self) -> None:
        """Test decoding of a request list-snapshots."""
        dialogues = AbciDialogues(connection_id=PUBLIC_ID)
        request = Request()
        list_snapshots = RequestListSnapshots()
        request.list_snapshots.CopyFrom(list_snapshots)
        message, dialogue = _TendermintProtocolDecoder.request_list_snapshots(
            request, dialogues, "counterparty"
        )
        assert isinstance(message, AbciMessage)
        assert isinstance(dialogue, AbciDialogue)

    def test_request_offer_snapshot(self) -> None:
        """Test decoding of a request offer-snapshot."""
        dialogues = AbciDialogues(connection_id=PUBLIC_ID)
        request = Request()
        offer_snapshot = RequestOfferSnapshot()
        snapshot = Snapshot()
        offer_snapshot.snapshot.CopyFrom(snapshot)
        offer_snapshot.app_hash = b""
        request.offer_snapshot.CopyFrom(offer_snapshot)
        message, dialogue = _TendermintProtocolDecoder.request_offer_snapshot(
            request, dialogues, "counterparty"
        )
        assert isinstance(message, AbciMessage)
        assert isinstance(dialogue, AbciDialogue)

    def test_request_load_snapshot_chunk(self) -> None:
        """Test decoding of a request load-snapshot-chunk."""
        dialogues = AbciDialogues(connection_id=PUBLIC_ID)
        request = Request()
        load_snapshot_chunk = RequestLoadSnapshotChunk()
        request.load_snapshot_chunk.CopyFrom(load_snapshot_chunk)
        message, dialogue = _TendermintProtocolDecoder.request_load_snapshot_chunk(
            request, dialogues, "counterparty"
        )
        assert isinstance(message, AbciMessage)
        assert isinstance(dialogue, AbciDialogue)

    def test_request_apply_snapshot_chunk(self) -> None:
        """Test decoding of a request apply-snapshot-chunk."""
        dialogues = AbciDialogues(connection_id=PUBLIC_ID)
        request = Request()
        apply_snapshot_chunk = RequestApplySnapshotChunk()
        request.apply_snapshot_chunk.CopyFrom(apply_snapshot_chunk)
        message, dialogue = _TendermintProtocolDecoder.request_apply_snapshot_chunk(
            request, dialogues, "counterparty"
        )
        assert isinstance(message, AbciMessage)
        assert isinstance(dialogue, AbciDialogue)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdklive.endpoint import endpoint_data


class AddCasterVideoResourceRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'live', '2016-11-01', 'AddCasterVideoResource', 'live')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ImageId(self):  # String
        return self.get_query_params().get('ImageId')

    def set_ImageId(self, ImageId):  # String
        self.add_query_param('ImageId', ImageId)

    def get_EndOffset(self):  # Integer
        return self.get_query_params().get('EndOffset')

    def set_EndOffset(self, EndOffset):  # Integer
        self.add_query_param('EndOffset', EndOffset)

    def get_MaterialId(self):  # String
        return self.get_query_params().get('MaterialId')

    def set_MaterialId(self, MaterialId):  # String
        self.add_query_param('MaterialId', MaterialId)

    def get_VodUrl(self):  # String
        return self.get_query_params().get('VodUrl')

    def set_VodUrl(self, VodUrl):  # String
        self.add_query_param('VodUrl', VodUrl)

    def get_StreamId(self):  # String
        return self.get_query_params().get('StreamId')

    def set_StreamId(self, StreamId):  # String
        self.add_query_param('StreamId', StreamId)

    def get_CasterId(self):  # String
        return self.get_query_params().get('CasterId')

    def set_CasterId(self, CasterId):  # String
        self.add_query_param('CasterId', CasterId)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_BeginOffset(self):  # Integer
        return self.get_query_params().get('BeginOffset')

    def set_BeginOffset(self, BeginOffset):  # Integer
        self.add_query_param('BeginOffset', BeginOffset)

    def get_FixedDelayDuration(self):  # Integer
        return self.get_query_params().get('FixedDelayDuration')

    def set_FixedDelayDuration(self, FixedDelayDuration):  # Integer
        self.add_query_param('FixedDelayDuration', FixedDelayDuration)

    def get_LiveStreamUrl(self):  # String
        return self.get_query_params().get('LiveStreamUrl')

    def set_LiveStreamUrl(self, LiveStreamUrl):  # String
        self.add_query_param('LiveStreamUrl', LiveStreamUrl)

    def get_LocationId(self):  # String
        return self.get_query_params().get('LocationId')

    def set_LocationId(self, LocationId):  # String
        self.add_query_param('LocationId', LocationId)

    def get_ImageUrl(self):  # String
        return self.get_query_params().get('ImageUrl')

    def set_ImageUrl(self, ImageUrl):  # String
        self.add_query_param('ImageUrl', ImageUrl)

    def get_PtsCallbackInterval(self):  # Integer
        return self.get_query_params().get('PtsCallbackInterval')

    def set_PtsCallbackInterval(self, PtsCallbackInterval):  # Integer
        self.add_query_param('PtsCallbackInterval', PtsCallbackInterval)

    def get_ResourceName(self):  # String
        return self.get_query_params().get('ResourceName')

    def set_ResourceName(self, ResourceName):  # String
        self.add_query_param('ResourceName', ResourceName)

    def get_RepeatNum(self):  # Integer
        return self.get_query_params().get('RepeatNum')

    def set_RepeatNum(self, RepeatNum):  # Integer
        self.add_query_param('RepeatNum', RepeatNum)
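For context, these generated request classes are typically driven through an AcsClient; a hedged usage sketch with placeholder credentials and IDs (substitute real values):

from aliyunsdkcore.client import AcsClient

# Placeholder credentials and region; these are not real values.
client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')

request = AddCasterVideoResourceRequest()
request.set_CasterId('<caster-id>')
request.set_ResourceName('camera-1')
request.set_LiveStreamUrl('rtmp://example.com/live/stream')

# Sends the signed RPC call and returns the raw response body (raises on error).
response = client.do_action_with_exception(request)
print(response)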
""" A Dataset of Information-Seeking Questions and Answers Anchored in Research Papers https://arxiv.org/abs/2105.03011 QASPER is a dataset of 5,049 questions over 1,585 Natural Language Processing papers. Each question is written by an NLP practitioner who read only the title and abstract of the corresponding paper, and the question seeks information present in the full text. The questions are then answered by a separate set of NLP practitioners who also provide supporting evidence to answers. Homepage: https://allenai.org/data/qasper """ from collections import Counter import re import string from lm_eval.base import rf, Task from lm_eval.metrics import f1_score, mean _CITATION = """ @article{DBLP:journals/corr/abs-2105-03011, author = {Pradeep Dasigi and Kyle Lo and Iz Beltagy and Arman Cohan and Noah A. Smith and Matt Gardner}, title = {A Dataset of Information-Seeking Questions and Answers Anchored in Research Papers}, journal = {CoRR}, volume = {abs/2105.03011}, year = {2021}, url = {https://arxiv.org/abs/2105.03011}, eprinttype = {arXiv}, eprint = {2105.03011}, timestamp = {Fri, 14 May 2021 12:13:30 +0200}, biburl = {https://dblp.org/rec/journals/corr/abs-2105-03011.bib}, bibsource = {dblp computer science bibliography, https://dblp.org} } """ def normalize_answer(s): """ Taken from the official evaluation script for v1.1 of the SQuAD dataset. Lower text and remove punctuation, articles and extra whitespace. """ def remove_articles(text): return re.sub(r"\b(a|an|the)\b", " ", text) def white_space_fix(text): return " ".join(text.split()) def remove_punc(text): exclude = set(string.punctuation) return "".join(ch for ch in text if ch not in exclude) def lower(text): return text.lower() return white_space_fix(remove_articles(remove_punc(lower(s)))) def categorise_answer(answer_blob): if answer_blob["unanswerable"]: answer = "unanswerable" answer_type = "unanswerable" return answer, answer_type elif answer_blob["yes_no"]: answer = "yes" answer_type = "bool" return answer, answer_type elif answer_blob["free_form_answer"]: answer = answer_blob["free_form_answer"] answer_type = "free form answer" return answer, answer_type elif answer_blob["extractive_spans"]: answer = answer_blob["extractive_spans"] answer_type = "extractive_spans" return answer, answer_type elif answer_blob["yes_no"] is False: answer = "no" answer_type = "bool" return answer, answer_type def token_f1_score(prediction, ground_truth): """ Taken from the official evaluation script for v1.1 of the SQuAD dataset. 
""" prediction_tokens = normalize_answer(prediction).split() ground_truth_tokens = normalize_answer(ground_truth).split() common = Counter(prediction_tokens) & Counter(ground_truth_tokens) num_same = sum(common.values()) if num_same == 0: return 0 precision = 1.0 * num_same / len(prediction_tokens) recall = 1.0 * num_same / len(ground_truth_tokens) f1 = (2 * precision * recall) / (precision + recall) return f1 class QASPER(Task): VERSION = 0 DATASET_PATH = "qasper" DATASET_NAME = None def has_training_docs(self): return True def has_validation_docs(self): return True def has_test_docs(self): return False def doc_to_text(self, doc): return ( "TITLE: " + doc["title"] + "\n" + "ABSTRACT: " + doc["abstract"] + "\n\n" + "Q: " + doc["question"] + "\n\n" + "A:" ) def doc_to_target(self, doc): answer = doc["answer"] if isinstance(answer, list): answer = ", ".join(answer) return " " + answer def METHOD_NAME(self): for doc in self.dataset["train"]: yield from self._process_doc(doc) def validation_docs(self): for doc in self.dataset["validation"]: yield from self._process_doc(doc) def _process_doc(self, doc): """Given a `doc`, flatten it out so that each JSON blob contains exactly one question and one answer. Logic taken from the reference implementation available at https://github.com/allenai/qasper-led-baseline/blob/main/scripts/evaluator.py """ obs_list = [] for question, answer_list in zip(doc["qas"]["question"], doc["qas"]["answers"]): for answer_blob in answer_list["answer"]: answer, answer_type = categorise_answer(answer_blob) obs_list.append( { "title": doc["title"], "abstract": doc["abstract"], "question": question, "answer": answer, "answer_type": answer_type, } ) return obs_list def process_results(self, doc, results): # TODO: Calculate a score for extractive spans once a request type for generating # extractive spans is available if not results: return {} elif len(results) == 1: [res] = results elif len(results) == 2: [ll_yes, ll_no] = results # TODO: Handle unanswerability first # unanswerable_gold = doc["answer_type"] == "unanswerable" # unanswerable_pred = exp(logprob_unanswerable) # res_dict["f1_unanswerable"] = (unanswerable_gold, unanswerable_pred) res_dict = {} # Handle yes/no questions if doc["answer_type"] == "bool": gold = 1 if doc["answer"] == "yes" else 0 pred = ll_yes > ll_no res_dict["f1_yesno"] = (gold, pred) # Handle completions if doc["answer_type"] == "free form answer": res_dict["f1_abstractive"] = token_f1_score(res, doc["answer"]) # TODO: Handle extraction # if doc["answer_type"] == "extractive_spans": # res_dict["f1_extractive"] = 0 return res_dict def aggregation(self): return { "f1_yesno": f1_score, "f1_abstractive": mean, } def construct_requests(self, doc, ctx): """Uses RequestFactory to construct Requests and returns an iterable of Requests which will be sent to the LM. :param doc: The document as returned from training_docs, validation_docs, or test_docs. :param ctx: str The context string, generated by fewshot_context. This includes the natural language description, as well as the few shot examples, and the question part of the document for `doc`. 
""" # unanswerable = rf.loglikelihood(ctx, " " + "unanswerable") if doc["answer_type"] in ("free form answer"): return [rf.greedy_until(ctx, {"until": ["\n"]})] elif doc["answer_type"] in ("bool"): ll_yes, _ = rf.loglikelihood(ctx, " yes") ll_no, _ = rf.loglikelihood(ctx, " no") return [ll_yes, ll_no] else: return [] def higher_is_better(self): """ :returns: {str: bool} A dictionary where keys are the names of submetrics and values are whether a higher value of the submetric is better """ return { "f1_yesno": True, "f1_abstractive": True, }
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdksas.endpoint import endpoint_data


class DescribeImageVulListRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Sas', '2018-12-03', 'DescribeImageVulList')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ClusterName(self):  # String
        return self.get_query_params().get('ClusterName')

    def set_ClusterName(self, ClusterName):  # String
        self.add_query_param('ClusterName', ClusterName)

    def get_Type(self):  # String
        return self.get_query_params().get('Type')

    def set_Type(self, Type):  # String
        self.add_query_param('Type', Type)

    def get_Tag(self):  # String
        return self.get_query_params().get('Tag')

    def set_Tag(self, Tag):  # String
        self.add_query_param('Tag', Tag)

    def get_Image(self):  # String
        return self.get_query_params().get('Image')

    def set_Image(self, Image):  # String
        self.add_query_param('Image', Image)

    def get_AliasName(self):  # String
        return self.get_query_params().get('AliasName')

    def set_AliasName(self, AliasName):  # String
        self.add_query_param('AliasName', AliasName)

    def get_InstanceId(self):  # String
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self, InstanceId):  # String
        self.add_query_param('InstanceId', InstanceId)

    def get_Name(self):  # String
        return self.get_query_params().get('Name')

    def set_Name(self, Name):  # String
        self.add_query_param('Name', Name)

    def get_ContainerId(self):  # String
        return self.get_query_params().get('ContainerId')

    def set_ContainerId(self, ContainerId):  # String
        self.add_query_param('ContainerId', ContainerId)

    def get_Necessity(self):  # String
        return self.get_query_params().get('Necessity')

    def set_Necessity(self, Necessity):  # String
        self.add_query_param('Necessity', Necessity)

    def get_Uuids(self):  # String
        return self.get_query_params().get('Uuids')

    def set_Uuids(self, Uuids):  # String
        self.add_query_param('Uuids', Uuids)

    def get_RepoId(self):  # String
        return self.get_query_params().get('RepoId')

    def set_RepoId(self, RepoId):  # String
        self.add_query_param('RepoId', RepoId)

    def get_StatusList(self):  # String
        return self.get_query_params().get('StatusList')

    def set_StatusList(self, StatusList):  # String
        self.add_query_param('StatusList', StatusList)

    def get_Pod(self):  # String
        return self.get_query_params().get('Pod')

    def set_Pod(self, Pod):  # String
        self.add_query_param('Pod', Pod)

    def get_RepoNamespace(self):  # String
        return self.get_query_params().get('RepoNamespace')

    def set_RepoNamespace(self, RepoNamespace):  # String
        self.add_query_param('RepoNamespace', RepoNamespace)

    def get_ScanRanges(self):  # RepeatList
        return self.get_query_params().get('ScanRange')

    def set_ScanRanges(self, ScanRange):  # RepeatList
        for depth1 in range(len(ScanRange)):
            self.add_query_param('ScanRange.' + str(depth1 + 1), ScanRange[depth1])

    def get_PageSize(self):  # Integer
        return self.get_query_params().get('PageSize')

    def set_PageSize(self, PageSize):  # Integer
        self.add_query_param('PageSize', PageSize)

    def get_Digest(self):  # String
        return self.get_query_params().get('Digest')

    def set_Digest(self, Digest):  # String
        self.add_query_param('Digest', Digest)

    def get_Lang(self):  # String
        return self.get_query_params().get('Lang')

    def set_Lang(self, Lang):  # String
        self.add_query_param('Lang', Lang)

    def get_Dealed(self):  # String
        return self.get_query_params().get('Dealed')

    def set_Dealed(self, Dealed):  # String
        self.add_query_param('Dealed', Dealed)

    def get_CurrentPage(self):  # Integer
        return self.get_query_params().get('CurrentPage')

    def set_CurrentPage(self, CurrentPage):  # Integer
        self.add_query_param('CurrentPage', CurrentPage)

    def get_ClusterId(self):  # String
        return self.get_query_params().get('ClusterId')

    def set_ClusterId(self, ClusterId):  # String
        self.add_query_param('ClusterId', ClusterId)

    def get_RepoName(self):  # String
        return self.get_query_params().get('RepoName')

    def set_RepoName(self, RepoName):  # String
        self.add_query_param('RepoName', RepoName)

    def get_Namespace(self):  # String
        return self.get_query_params().get('Namespace')

    def set_Namespace(self, Namespace):  # String
        self.add_query_param('Namespace', Namespace)

    def get_RepoInstanceId(self):  # String
        return self.get_query_params().get('RepoInstanceId')

    def set_RepoInstanceId(self, RepoInstanceId):  # String
        self.add_query_param('RepoInstanceId', RepoInstanceId)

    def get_RepoRegionId(self):  # String
        return self.get_query_params().get('RepoRegionId')

    def set_RepoRegionId(self, RepoRegionId):  # String
        self.add_query_param('RepoRegionId', RepoRegionId)
from typing import List

from lark import Token

from methods.regular.regular_api import *
from default.tests.test_utils import testing_setup
from shared.tests.test_utils import common_actions, data_mocking
from shared.query_engine.sql_alchemy_query_elements.query_elements import QueryElement, QueryEntity
from shared.query_engine.expressions.expressions import CompareExpression, CompareOperator
from shared.query_engine.sql_alchemy_query_elements.scalar import ScalarQueryElement
from shared.query_engine.sql_alchemy_query_elements.file import FileQueryElement
from shared.query_engine.sql_alchemy_query_elements.attribute import AttributeQueryElement
from shared.query_engine.sql_alchemy_query_elements.dataset import DatasetQuery
from shared.query_engine.sql_alchemy_query_elements.dataset_tag import TagDatasetQueryElement
from shared.query_engine.sql_alchemy_query_elements.labels import LabelsQueryElement


class TestQueryElement(testing_setup.DiffgramBaseTestCase):
    """ """

    def setUp(self):
        # TODO: this test is assuming the 'my-sandbox-project' exists and some objects have been previously created.
        # For future tests a mechanism of setting up and tearing down the database should be created.
        super(TestQueryElement, self).setUp()
        project_data = data_mocking.create_project_with_context(
            {
                'users': [
                    {'username': 'Test',
                     'email': '[email protected]',
                     'password': 'diffgram123',
                     }
                ]
            },
            self.session
        )
        self.project = project_data['project']
        self.project_data = project_data
        self.auth_api = common_actions.create_project_auth(project = self.project, session = self.session)
        self.member = self.auth_api.member

    def test_determine_if_reserved_word(self):
        reserved_words: List[str] = ['label', 'attribute', 'file', 'dataset_id', 'dataset_tag', 'list']
        q_elm = QueryElement()
        for w in reserved_words:
            res = q_elm.determine_if_reserved_word(w)
            self.assertTrue(res)
        res = q_elm.determine_if_reserved_word('something else')
        self.assertFalse(res)

    def test_new_error(self):
        # Test error on an invalid keyword
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "something", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), ['is_reserved_word'])
        self.assertIsNone(query_element)

    def test_new_scalar(self):
        # Test scalar
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "'something'", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), [])
        self.assertIsNotNone(query_element)
        self.assertEqual(type(query_element), ScalarQueryElement)
        self.assertIsNotNone(query_element.project_id)
        self.assertIsNotNone(query_element.log)
        self.assertIsNotNone(query_element.query_entity.key, "scalar")

    def test_new_file(self):
        # Test file
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "file.something", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), [])
        self.assertIsNotNone(query_element)
        self.assertEqual(type(query_element), FileQueryElement)
        self.assertIsNotNone(query_element.project_id)
        self.assertIsNotNone(query_element.log)
        self.assertIsNotNone(query_element.query_entity.key, "file")

    def test_attribute(self):
        attr = data_mocking.create_attribute_template_group({
            'name': f'something',
            'project_id': self.project.id,
            'kind': 'select'
        }, self.session)
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "attribute.something", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), [])
        self.assertIsNotNone(query_element)
        self.assertEqual(type(query_element), AttributeQueryElement)
        self.assertIsNotNone(query_element.project_id)
        self.assertIsNotNone(query_element.log)
        self.assertIsNotNone(query_element.query_entity.key, "attribute")

    def test_dataset(self):
        # Test dataset id
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "dataset.id", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), [])
        self.assertIsNotNone(query_element)
        self.assertEqual(type(query_element), DatasetQuery)
        self.assertIsNotNone(query_element.project_id)
        self.assertIsNotNone(query_element.log)
        self.assertIsNotNone(query_element.query_entity.key, "dataset_id")

    def test_dataset_tag(self):
        # Test dataset tag
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "dataset.tag", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), [])
        self.assertIsNotNone(query_element)
        self.assertEqual(type(query_element), TagDatasetQueryElement)
        self.assertIsNotNone(query_element.project_id)
        self.assertIsNotNone(query_element.log)
        self.assertIsNotNone(query_element.query_entity.key, "dataset_tag")

    def test_label(self):
        # Test label
        label = data_mocking.create_label({
            'name': 'robot',
        }, self.session)
        label_file = data_mocking.create_label_file({
            'label': label,
            'project_id': self.project.id
        }, self.session)
        query_element, log = QueryElement.new(
            session = self.session,
            log = regular_log.default(),
            project_id = self.project.id,
            token = Token(value = "label.robot", type_ = "something")
        )
        self.assertEqual(list(log['error'].keys()), [])
        self.assertIsNotNone(query_element)
        self.assertEqual(type(query_element), LabelsQueryElement)
        self.assertIsNotNone(query_element.project_id)
        self.assertIsNotNone(query_element.log)
        self.assertIsNotNone(query_element.query_entity.key, "label")
##########################################################################
#
#  Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
#  Redistribution and use in source and binary forms, with or without
#  modification, are permitted provided that the following conditions are
#  met:
#
#      * Redistributions of source code must retain the above copyright
#        notice, this list of conditions and the following disclaimer.
#
#      * Redistributions in binary form must reproduce the above copyright
#        notice, this list of conditions and the following disclaimer in the
#        documentation and/or other materials provided with the distribution.
#
#      * Neither the name of Image Engine Design nor the names of any
#        other contributors to this software may be used to endorse or
#        promote products derived from this software without specific prior
#        written permission.
#
#  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
#  IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
#  THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
#  PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
#  CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
#  EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#  PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
#  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
#  LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
#  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################

from __future__ import with_statement

import unittest

import IECore
import IECoreGL

IECoreGL.init( False )


class TestState( unittest.TestCase ) :

    def testConstructor( self ) :
        s = IECoreGL.State( False )
        self.assertTrue( not s.isComplete() )
        s = IECoreGL.State( True )
        self.assertTrue( s.isComplete() )

    def testUserAttributes( self ) :
        s = IECoreGL.State( False )
        self.assertEqual( s.userAttributes(), IECore.CompoundData() )
        self.assertTrue( s.userAttributes().isSame( s.userAttributes() ) )

        s.userAttributes()["test"] = IECore.IntData( 1 )
        self.assertEqual( s.userAttributes(), IECore.CompoundData( { "test" : IECore.IntData( 1 ) } ) )

        s2 = IECoreGL.State( s )
        self.assertEqual( s.userAttributes(), s2.userAttributes() )

        s2.userAttributes()["test2"] = IECore.IntData( 20 )
        self.assertEqual( s.userAttributes(), IECore.CompoundData( { "test" : IECore.IntData( 1 ) } ) )
        self.assertEqual( s2.userAttributes(), IECore.CompoundData( { "test" : IECore.IntData( 1 ), "test2" : IECore.IntData( 20 ) } ) )

    def testScopedBinding( self ) :
        state1 = IECoreGL.State( True )
        state1.add( IECoreGL.NameStateComponent( "billy" ) )
        state2 = IECoreGL.State( False )
        state2.add( IECoreGL.NameStateComponent( "bob" ) )

        self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "billy" )
        self.assertEqual( state2.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )

        with IECoreGL.State.ScopedBinding( state2, state1 ) :
            self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )
            self.assertEqual( state2.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )

        self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "billy" )
        self.assertEqual( state2.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )

    def testOverrides( self ) :
        state1 = IECoreGL.State( True )
        state1.add( IECoreGL.NameStateComponent( "billy" ) )
        state2 = IECoreGL.State( False )
        state2.add( IECoreGL.NameStateComponent( "bob" ), override = True )
        state3 = IECoreGL.State( False )
        state3.add( IECoreGL.NameStateComponent( "jane" ), override = False )

        with IECoreGL.State.ScopedBinding( state2, state1 ) :
            self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )
            with IECoreGL.State.ScopedBinding( state3, state1 ) :
                self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )
            self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )

        self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "billy" )

        with IECoreGL.State.ScopedBinding( state3, state1 ) :
            self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "jane" )
            with IECoreGL.State.ScopedBinding( state2, state1 ) :
                self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "bob" )
            self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "jane" )

        self.assertEqual( state1.get( IECoreGL.NameStateComponent.staticTypeId() ).name(), "billy" )


if __name__ == "__main__":
    unittest.main()
import asyncio
from collections import deque
from typing import Optional

import pandas as pd
from bidict import bidict

from hummingbot.core.api_throttler.async_throttler import AsyncThrottler
from hummingbot.core.network_base import NetworkBase
from hummingbot.core.network_iterator import NetworkStatus
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory
from hummingbot.core.web_assistant.ws_assistant import WSAssistant


class CandlesBase(NetworkBase):
    """
    This class serves as a base class for fetching and storing candle data from a cryptocurrency exchange.
    The class uses the Rest and WS Assistants for all the IO operations, and a double-ended queue to store candles.
    It also implements the Throttler module for API rate limiting, though that is not strictly necessary since
    the realtime data should be updated mainly via websockets.
    """
    interval_to_seconds = bidict({
        "1s": 1,
        "1m": 60,
        "3m": 180,
        "5m": 300,
        "15m": 900,
        "30m": 1800,
        "1h": 3600,
        "2h": 7200,
        "4h": 14400,
        "6h": 21600,
        "8h": 28800,
        "12h": 43200,
        "1d": 86400,
        "3d": 259200,
        "1w": 604800,
        "1M": 2592000
    })
    columns = ["timestamp", "open", "high", "low", "close", "volume", "quote_asset_volume",
               "n_trades", "taker_buy_base_volume", "taker_buy_quote_volume"]

    def __init__(self, trading_pair: str, interval: str = "1m", max_records: int = 150):
        super().__init__()
        async_throttler = AsyncThrottler(rate_limits=self.rate_limits)
        self._api_factory = WebAssistantsFactory(throttler=async_throttler)
        self._candles = deque(maxlen=max_records)
        self._listen_candles_task: Optional[asyncio.Task] = None
        self._trading_pair = trading_pair
        self._ex_trading_pair = self.get_exchange_trading_pair(trading_pair)
        if interval in self.intervals.keys():
            self.interval = interval
        else:
            self.logger().exception(
                f"Interval {interval} is not supported. Available Intervals: {self.intervals.keys()}")
            raise

    async def start_network(self):
        """
        This method starts the network and starts a task for listen_for_subscriptions.
        """
        await self.stop_network()
        self._listen_candles_task = safe_ensure_future(self.listen_for_subscriptions())

    async def stop_network(self):
        """
        This method stops the network by canceling the _listen_candles_task task.
        """
        if self._listen_candles_task is not None:
            self._listen_candles_task.cancel()
            self._listen_candles_task = None

    @property
    def is_ready(self):
        """
        This property returns a boolean indicating whether the _candles deque has reached its maximum length.
        """
        return len(self._candles) == self._candles.maxlen

    @property
    def name(self):
        raise NotImplementedError

    @property
    def rest_url(self):
        raise NotImplementedError

    @property
    def health_check_url(self):
        raise NotImplementedError

    @property
    def candles_endpoint(self):
        raise NotImplementedError

    @property
    def wss_url(self):
        raise NotImplementedError

    @property
    def rate_limits(self):
        raise NotImplementedError

    @property
    def intervals(self):
        raise NotImplementedError

    async def check_network(self) -> NetworkStatus:
        raise NotImplementedError

    @property
    def candles_df(self) -> pd.DataFrame:
        """
        This property returns the candles stored in the _candles deque as a Pandas DataFrame.
        """
        return pd.DataFrame(self._candles, columns=self.columns, dtype=float)

    def get_exchange_trading_pair(self, trading_pair):
        raise NotImplementedError

    async def fetch_candles(self,
                            start_time: Optional[int] = None,
                            end_time: Optional[int] = None,
                            limit: Optional[int] = 500):
        """
        This is an abstract method that must be implemented by a subclass to fetch candles from the exchange API.
        :param start_time: start time to fetch candles
        :param end_time: end time to fetch candles
        :param limit: quantity of candles
        :return: numpy array with the candlesticks
        """
        raise NotImplementedError

    async def fill_historical_candles(self):
        """
        This is an abstract method that must be implemented by a subclass to fill the _candles deque
        with historical candles.
        """
        raise NotImplementedError

    async def listen_for_subscriptions(self):
        """
        Connects to the candlestick websocket endpoint and listens to the messages sent by the exchange.
        """
        ws: Optional[WSAssistant] = None
        while True:
            try:
                ws: WSAssistant = await self._connected_websocket_assistant()
                await self._subscribe_channels(ws)
                await self._process_websocket_messages(websocket_assistant=ws)
            except asyncio.CancelledError:
                raise
            except ConnectionError as connection_exception:
                self.logger().warning(f"The websocket connection was closed ({connection_exception})")
            except Exception:
                self.logger().exception(
                    "Unexpected error occurred when listening to public klines. Retrying in 1 second...",
                )
                await self._sleep(1.0)
            finally:
                await self._on_order_stream_interruption(websocket_assistant=ws)

    async def _connected_websocket_assistant(self) -> WSAssistant:
        ws: WSAssistant = await self._api_factory.get_ws_assistant()
        await ws.connect(ws_url=self.wss_url, ping_timeout=30)
        return ws

    async def _subscribe_channels(self, ws: WSAssistant):
        """
        Subscribes to the candles events through the provided websocket connection.
        :param ws: the websocket assistant used to connect to the exchange
        """
        raise NotImplementedError

    async def _process_websocket_messages(self, websocket_assistant: WSAssistant):
        raise NotImplementedError

    async def _sleep(self, delay):
        """
        Function added only to facilitate patching the sleep in unit tests without affecting the asyncio module.
        """
        await asyncio.sleep(delay)

    async def _on_order_stream_interruption(self, websocket_assistant: Optional[WSAssistant] = None):
        websocket_assistant and await websocket_assistant.disconnect()
        self._candles.clear()

    def get_seconds_from_interval(self, interval: str) -> int:
        """
        This method returns the number of seconds from the interval string.
        :param interval: interval string
        :return: number of seconds
        """
        return self.interval_to_seconds[interval]
# Copyright (C) 2018-2023 The NeoVintageous Team (NeoVintageous).
#
# This file is part of NeoVintageous.
#
# NeoVintageous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NeoVintageous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NeoVintageous. If not, see <https://www.gnu.org/licenses/>.

from NeoVintageous.tests import unittest


class Test_gf(unittest.FunctionalTestCase):

    def assertOpened(self, opener, file: str) -> None:
        opener.assert_called_with(self.view.window(), file, None, None)
        # Reset to avoid false-positives in subsequent calls.
        opener.reset_mock()

    @unittest.mock_bell()
    @unittest.mock.patch.dict(unittest.os.environ, {"HOME": "/home"})
    @unittest.mock.patch('NeoVintageous.nv.commands.window_open_file')
    def test_gf(self, opener):
        # (before, after, expected file name)
        tests = [
            ('x path/to/READ|ME.md y', 'x path/to/READ|ME.md y', 'path/to/README.md'),
            ('|/tmp/fizz.txt', '|/tmp/fizz.txt', '/tmp/fizz.txt'),
            ('|\\tmp\\fizz.txt', '|\\tmp\\fizz.txt', '\\tmp\\fizz.txt'),
            ('/|tmp/fizz.txt', '/|tmp/fizz.txt', '/tmp/fizz.txt'),
            ('|stop.txt.', '|stop.txt.', 'stop.txt'),
            ('|comma.txt,', '|comma.txt,', 'comma.txt'),
            ('|semicolon.txt;', '|semicolon.txt;', 'semicolon.txt'),
            ('|colon.txt:', '|colon.txt:', 'colon.txt'),
            ('|bang.txt!', '|bang.txt!', 'bang.txt'),
            ('|xxx:/tmp/fizz1.txt', '|xxx:/tmp/fizz1.txt', '/tmp/fizz1.txt'),
            ('|xxx:/tmp/fizz2.txt', '|xxx:/tmp/fizz2.txt', '/tmp/fizz2.txt'),
            ('|$HOME/fizz3.txt', '|$HOME/fizz3.txt', '/home/fizz3.txt'),
            ('REA|DME.md', 'REA|DME.md', 'README.md'),
            (' REA|DME.md ', ' REA|DME.md ', 'README.md'),
            ('\nREA|DME.md\n', '\nREA|DME.md\n', 'README.md'),
            ('path/to/REA|DME.md', 'path/to/REA|DME.md', 'path/to/README.md'),
            (' pat|h/to/README.md ', ' pat|h/to/README.md ', 'path/to/README.md'),
            ('\npath|/to/README.md\n', '\npath|/to/README.md\n', 'path/to/README.md'),
            ('|./fizz.txt', '|./fizz.txt', './fizz.txt'),
            ('|../fizz.txt', '|../fizz.txt', '../fizz.txt'),
            ('|../../fizz.txt', '|../../fizz.txt', '../../fizz.txt'),
            ('| /fizz.txt', '| /fizz.txt', '/fizz.txt'),
            ('|/fizz.txt /buzz.txt', '|/fizz.txt /buzz.txt', '/fizz.txt'),
            ('/fi|zz.txt /buzz.txt', '/fi|zz.txt /buzz.txt', '/fizz.txt'),
            ('/fizz.txt |/buzz.txt', '/fizz.txt |/buzz.txt', '/buzz.txt'),
            ('/fizz.txt /bu|zz.txt', '/fizz.txt /bu|zz.txt', '/buzz.txt'),
            ('x /fizz.txt /buzz|.txt x', 'x /fizz.txt /buzz|.txt x', '/buzz.txt'),
            ('|parans.txt(1).', '|parans.txt(1).', 'parans.txt'),
            ('|row.txt(42)', '|row.txt(42)', 'row.txt'),
            ('|row.txt:42', '|row.txt:42', 'row.txt'),
            ('|row.txt@42', '|row.txt@42', 'row.txt'),
            ('|rowcol.txt:4:2', '|rowcol.txt:4:2', 'rowcol.txt'),
        ]

        for test in tests:
            self.eq(test[0], 'n_gf', test[1])
            self.assertOpened(opener, test[2])
        self.assertNoBell()

    @unittest.mock_bell()
    @unittest.mock.patch('NeoVintageous.nv.commands.window_open_file')
    def test_gf_no_file_name_under_cursor(self, opener):
        self.eq('|', 'n_gf', '|')
        self.eq('$|$$', 'n_gf', '$|$$')
        self.assertMockNotCalled(opener)
        self.assertBell('E446: No file name under cursor', count=2)

    @unittest.mock_bell()
    @unittest.mock.patch('NeoVintageous.nv.commands.window_open_file')
    def test_gf_should_emit_bell_if_file_not_opened(self, opener):
        opener.return_value = False
        self.eq('|fizz.txt', 'n_gf', '|fizz.txt')
        self.assertBell('E447: Cannot find file \'fizz.txt\' in path')

    @unittest.mock_bell()
    def test_gf_not_found(self):
        self.eq('|tests/fixtures/foo.txt', 'n_gf', '|tests/fixtures/foo.txt')
        self.assertBell('E447: Cannot find file \'tests/fixtures/foo.txt\' in path')

    @unittest.mock_session()
    @unittest.mock_bell()
    @unittest.mock.patch('sublime.Window.open_file')
    def test_gf_found(self, opener):
        fixture = self.fixturePath('fizz.txt')
        self.feed(':cd ' + self.rootPath())
        if unittest.is_windows():
            self.eq('|tests\\fixtures\\fizz.txt', 'n_gf', '|tests\\fixtures\\fizz.txt')
        else:
            self.eq('|tests/fixtures/fizz.txt', 'n_gf', '|tests/fixtures/fizz.txt')
        opener.assert_called_with(fixture, 0)
        self.assertNoBell()

    @unittest.mock.patch.dict(unittest.os.environ, {"HOME": "/home"})
    @unittest.mock.patch('NeoVintageous.nv.commands.window_open_file')
    def test_v(self, opener):
        self.eq('/tmp|/fizz/buzz1.log| x', 'v_gf', '/tmp|/fizz/buzz1.log| x')
        self.assertOpened(opener, '/fizz/buzz1.log')
        self.eq('r_/tmp|/fizz/buzz2.log| x', 'v_gf', 'r_/tmp|/fizz/buzz2.log| x')
        self.assertOpened(opener, '/fizz/buzz2.log')

    @unittest.mock.patch.dict(unittest.os.environ, {"HOME": "/home"})
    @unittest.mock.patch('NeoVintageous.nv.commands.window_open_file')
    def test_V(self, opener):
        self.eq('x\n|/tmp/fizz.log\n|x', 'V_gf', 'x\n|/tmp/fizz.log\n|x')
        self.assertOpened(opener, '/tmp/fizz.log')

    @unittest.mock.patch.dict(unittest.os.environ, {"HOME": "/home"})
    @unittest.mock.patch('NeoVintageous.nv.commands.window_open_file')
    def test_b(self, opener):
        self.eq('x\n|/tmp/fizz.log|\nx', 'b_gf', 'x\n|/tmp/fizz.log|\nx')
        self.assertOpened(opener, '/tmp/fizz.log')
# ***************************************************************************
# *
# * Authors:    Amaya Jimenez ([email protected])
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307  USA
# *
# *  All comments concerning this program package may be sent to the
# *  e-mail address '[email protected]'
# ***************************************************************************/

from pyworkflow.tests import BaseTest, setupTestProject, DataSet
from pyworkflow.plugin import Domain
from pwem.protocols import ProtImportMicrographs, ProtSubSet
from xmipp3.protocols.protocol_extract_particles import *
from xmipp3.protocols.protocol_cl2d import *
from xmipp3.protocols.protocol_center_particles import *

ProtCTFFind = Domain.importFromPlugin('cistem.protocols', 'CistemProtCTFFind', doRaise=True)
EmanProtAutopick = Domain.importFromPlugin('eman2.protocols', 'EmanProtAutopick', doRaise=True)

# Number of mics to be processed
NUM_MICS = 5


class TestCenterParticles(BaseTest):
    @classmethod
    def setUpClass(cls):
        setupTestProject(cls)
        cls.dsRelion = DataSet.getDataSet('relion_tutorial')

    def importMicrographs(self):
        prot = self.newProtocol(ProtImportMicrographs,
                                filesPath=self.dsRelion.getFile('micrographs'),
                                filesPattern='*.mrc',
                                samplingRateMode=1,
                                magnification=79096,
                                scannedPixelSize=56,
                                voltage=300,
                                sphericalAberration=2.0)
        self.launchProtocol(prot)
        return prot

    def subsetMics(self, inputMics):
        protSubset = ProtSubSet()
        protSubset.inputFullSet.set(inputMics)
        protSubset.chooseAtRandom.set(True)
        protSubset.nElements.set(NUM_MICS)
        self.launchProtocol(protSubset)
        return protSubset

    def calculateCtf(self, inputMics):
        protCTF = ProtCTFFind()
        protCTF.inputMicrographs.set(inputMics)
        # Gone in new version: protCTF.ctfDownFactor.set(1.0)
        protCTF.lowRes.set(44)
        protCTF.highRes.set(15)
        self.launchProtocol(protCTF)
        return protCTF

    def METHOD_NAME(self, inputMicrographs):
        """ Run a particle picking. """
        protPicking = EmanProtAutopick(boxSize=64,
                                       numberOfThreads=1,
                                       numberOfMpi=1,
                                       boxerMode=3,
                                       gaussLow=0.001)
        protPicking.inputMicrographs.set(inputMicrographs)
        self.launchProtocol(protPicking)
        return protPicking

    def runExtractParticles(self, inputCoord, setCtfs):
        protExtract = self.newProtocol(XmippProtExtractParticles,
                                       boxSize=64,
                                       doInvert=True,
                                       doFlip=False)
        protExtract.inputCoordinates.set(inputCoord)
        protExtract.ctfRelations.set(setCtfs)
        self.launchProtocol(protExtract)
        return protExtract

    def runClassify(self, inputParts):
        numClasses = int(inputParts.getSize() / 1000)
        if numClasses <= 2:
            numClasses = 4
        protClassify = self.newProtocol(XmippProtCL2D,
                                        numberOfIterations=2,
                                        numberOfClasses=numClasses,
                                        numberOfInitialClasses=numClasses)
        protClassify.inputParticles.set(inputParts)
        self.launchProtocol(protClassify)
        return protClassify, numClasses

    def runRealign(self, inputClasses, inputMics):
        protRealing = self.newProtocol(XmippProtCenterParticles)
        protRealing.inputClasses.set(inputClasses)
        protRealing.inputMics.set(inputMics)
        self.launchProtocol(protRealing)
        return protRealing

    def test_pattern(self):
        protImportMics = self.importMicrographs()
        if protImportMics.isFailed():
            self.assertTrue(False)

        if NUM_MICS < 20:
            protSubsetMics = self.subsetMics(protImportMics.outputMicrographs)
            if protSubsetMics.isFailed():
                self.assertTrue(False)
            outMics = protSubsetMics.outputMicrographs

        protCtf = self.calculateCtf(outMics)
        if protCtf.isFailed():
            self.assertTrue(False)

        protPicking = self.METHOD_NAME(outMics)
        if protPicking.isFailed():
            self.assertTrue(False)

        protExtract = self.runExtractParticles(
            protPicking.outputCoordinates, protCtf.outputCTF)
        if protExtract.isFailed():
            self.assertTrue(False)

        protClassify, numClasses = self.runClassify(protExtract.outputParticles)
        if protClassify.isFailed():
            self.assertTrue(False)
        if not protClassify.hasAttribute('outputClasses'):
            self.assertTrue(False)
        if protClassify.outputClasses.getSize() != numClasses:
            self.assertTrue(False)

        protRealing = self.runRealign(protClassify.outputClasses, outMics)
        if protRealing.isFailed():
            self.assertTrue(False)
        if not protRealing.hasAttribute('outputClasses') or not \
                protRealing.hasAttribute('outputParticles'):
            self.assertTrue(False)
        if protRealing.outputClasses.getSize() != numClasses:
            self.assertTrue(False)
# Copyright 2018-2022 The glTF-Blender-IO authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ....io.com.gltf2_io_extensions import Extension
from ...exp import gltf2_blender_get
from . import gltf2_blender_gather_texture_info


def detect_shadeless_material(blender_material, export_settings):
    """Detect if this material is "shadeless" ie. should be exported
    with KHR_materials_unlit. Returns None if not. Otherwise, returns
    a dict with info from parsing the node tree.
    """
    if not blender_material.use_nodes:
        return None

    # Old Background node detection (unlikely to happen)
    bg_socket = gltf2_blender_get.get_socket(blender_material, "Background")
    if bg_socket is not None:
        return {'rgb_socket': bg_socket}

    # Look for
    # * any color socket, connected to...
    # * optionally, the lightpath trick, connected to...
    # * optionally, a mix-with-transparent (for alpha), connected to...
    # * the output node

    info = {}

    for node in blender_material.node_tree.nodes:
        if node.type == 'OUTPUT_MATERIAL' and node.is_active_output:
            socket = node.inputs[0]
            break
    else:
        return None

    # Be careful not to misidentify a lightpath trick as mix-alpha.
    result = __detect_lightpath_trick(socket)
    if result is not None:
        socket = result['next_socket']
    else:
        result = __detect_mix_alpha(socket)
        if result is not None:
            socket = result['next_socket']
            info['alpha_socket'] = result['alpha_socket']

        result = __detect_lightpath_trick(socket)
        if result is not None:
            socket = result['next_socket']

    # Check if a color socket, or connected to a color socket
    if socket.type != 'RGBA':
        from_socket = gltf2_blender_get.previous_socket(socket)
        if from_socket is None:
            return None
        if from_socket.type != 'RGBA':
            return None

    info['rgb_socket'] = socket
    return info


def __detect_mix_alpha(socket):
    # Detects this (used for an alpha hookup)
    #
    #                  [ Mix ]
    #  alpha_socket => [Factor     ] => socket
    # [Transparent] => [Shader     ]
    #   next_socket => [Shader     ]
    #
    # Returns None if not detected. Otherwise, a dict containing alpha_socket
    # and next_socket.
    prev = gltf2_blender_get.previous_node(socket)
    if prev is None or prev.type != 'MIX_SHADER':
        return None
    in1 = gltf2_blender_get.previous_node(prev.inputs[1])
    if in1 is None or in1.type != 'BSDF_TRANSPARENT':
        return None
    return {
        'alpha_socket': prev.inputs[0],
        'next_socket': prev.inputs[2],
    }


def __detect_lightpath_trick(socket):
    # Detects this (used to prevent casting light on other objects) See ex.
    # https://blender.stackexchange.com/a/21535/88681
    #
    #                 [ Lightpath ]    [ Mix ]
    #                 [ Is Camera Ray] => [Factor     ] => socket
    #                    (don't care) => [Shader     ]
    #  next_socket => [ Emission ]    => [Shader     ]
    #
    # The Emission node can be omitted.
    # Returns None if not detected. Otherwise, a dict containing
    # next_socket.
    prev = gltf2_blender_get.previous_node(socket)
    if prev is None or prev.type != 'MIX_SHADER':
        return None
    in0 = gltf2_blender_get.previous_socket(prev.inputs[0])
    if in0 is None or in0.node.type != 'LIGHT_PATH':
        return None
    if in0.name != 'Is Camera Ray':
        return None
    next_socket = prev.inputs[2]

    # Detect emission
    prev = gltf2_blender_get.previous_node(next_socket)
    if prev is not None and prev.type == 'EMISSION':
        next_socket = prev.inputs[0]

    return {'next_socket': next_socket}


def gather_base_color_factor(info, export_settings):
    rgb, alpha = None, None

    if 'rgb_socket' in info:
        rgb = gltf2_blender_get.get_factor_from_socket(info['rgb_socket'], kind='RGB')
    if 'alpha_socket' in info:
        alpha = gltf2_blender_get.get_factor_from_socket(info['alpha_socket'], kind='VALUE')

    if rgb is None:
        rgb = [1.0, 1.0, 1.0]
    if alpha is None:
        alpha = 1.0

    rgba = [*rgb, alpha]
    if rgba == [1, 1, 1, 1]:
        return None
    return rgba


def METHOD_NAME(info, export_settings):
    sockets = (info.get('rgb_socket'), info.get('alpha_socket'))
    sockets = tuple(s for s in sockets if s is not None)
    if sockets:
        # NOTE: separate RGB and Alpha textures will not get combined
        # because gather_image determines how to pack images based on the
        # names of sockets, and the names are hard-coded to a Principled
        # style graph.
        unlit_texture, unlit_use_active_uvmap, _ = gltf2_blender_gather_texture_info.gather_texture_info(
            sockets[0],
            sockets,
            (),
            export_settings,
        )
        return unlit_texture, ["unlitTexture"] if unlit_use_active_uvmap else None
    return None, None
import sys

import click
from tabulate import tabulate

from ai.backend.cli.main import main
from ai.backend.cli.types import ExitCode

from ..session import Session
from .pretty import print_error, print_info, print_warn


@main.group()
def dotfile():
    """Provides dotfile operations."""


@dotfile.command()
@click.argument("path", metavar="PATH")
@click.option(
    "--perm",
    "permission",
    help="Linux permission represented in octal number (e.g. 755). Defaults to 755 if not specified.",
)
@click.option(
    "-f",
    "--file",
    "dotfile_path",
    help="Path to dotfile to upload. If not specified, client will try to read file from STDIN.",
)
@click.option(
    "-o",
    "--owner",
    "--owner-access-key",
    "owner_access_key",
    metavar="ACCESS_KEY",
    help="Specify the owner of the target session of user dotfiles.",
)
@click.option(
    "-d", "--domain", "domain", metavar="DOMAIN", help="Specify the domain name of domain dotfiles."
)
@click.option(
    "-g",
    "--group",
    metavar="GROUP",
    help=(
        "Specify the group name or id of group dotfiles. "
        "(If group name is provided, domain name must be specified with option -d)"
    ),
)
def create(path, permission, dotfile_path, owner_access_key, domain, group):
    """
    Store dotfile to Backend.AI Manager.
    Dotfiles will be automatically loaded when creating kernels.

    PATH: Where dotfiles will be created when starting kernel
    """
    if dotfile_path:
        with open(dotfile_path, "r") as fr:
            body = fr.read()
    else:
        body = ""
        for line in sys.stdin:
            body += line  # stdin lines already carry their trailing newline
    with Session() as session:
        try:
            if not permission:
                permission = "755"
            dotfile_ = session.Dotfile.create(
                body,
                path,
                permission,
                owner_access_key=owner_access_key,
                domain=domain,
                group=group,
            )
            print_info(f"Dotfile {dotfile_.path} created and ready")
        except Exception as e:
            print_error(e)
            sys.exit(ExitCode.FAILURE)


@dotfile.command()
@click.argument("path", metavar="PATH")
@click.option(
    "-o",
    "--owner",
    "--owner-access-key",
    "owner_access_key",
    metavar="ACCESS_KEY",
    help="Specify the owner of the target session of user dotfiles.",
)
@click.option(
    "-d", "--domain", "domain", metavar="DOMAIN", help="Specify the domain name of domain dotfiles."
)
@click.option(
    "-g",
    "--group",
    metavar="GROUP",
    help=(
        "Specify the group name or id of group dotfiles. "
        "(If group name is provided, domain name must be specified with option -d)"
    ),
)
def get(path, owner_access_key, domain, group):
    """
    Print dotfile content.
    """
    with Session() as session:
        try:
            dotfile_ = session.Dotfile(
                path, owner_access_key=owner_access_key, domain=domain, group=group
            )
            body = dotfile_.get()
            print(body["data"])
        except Exception as e:
            print_error(e)
            sys.exit(ExitCode.FAILURE)


@dotfile.command()
@click.option(
    "-o",
    "--owner",
    "--owner-access-key",
    "owner_access_key",
    metavar="ACCESS_KEY",
    help="Specify the owner of the target session of user dotfiles.",
)
@click.option(
    "-d", "--domain", "domain", metavar="DOMAIN", help="Specify the domain name of domain dotfiles."
)
@click.option(
    "-g",
    "--group",
    metavar="GROUP",
    help=(
        "Specify the group name or id of group dotfiles. "
        "(If group name is provided, domain name must be specified with option -d)"
    ),
)
def list(owner_access_key, domain, group):
    """
    List available user/domain/group dotfiles.
    """
    fields = [
        ("Path", "path", None),
        ("Data", "data", lambda v: v[:30].splitlines()[0]),
        ("Permission", "permission", None),
    ]
    with Session() as session:
        try:
            resp = session.Dotfile.list_dotfiles(
                owner_access_key=owner_access_key, domain=domain, group=group
            )
            if not resp:
                print("There are no dotfiles created yet.")
                return
            rows = (
                tuple(
                    item[key] if transform is None else transform(item[key])
                    for _, key, transform in fields
                )
                for item in resp
            )
            hdrs = (display_name for display_name, _, _ in fields)
            print(tabulate(rows, hdrs))
        except Exception as e:
            print_error(e)
            sys.exit(ExitCode.FAILURE)


@dotfile.command()
@click.argument("path", metavar="PATH")
@click.option(
    "--perm",
    "permission",
    help="Linux permission represented in octal number (e.g. 755). Defaults to 755 if not specified.",
)
@click.option(
    "-f",
    "--file",
    "dotfile_path",
    help="Path to dotfile to upload. If not specified, client will try to read file from STDIN.",
)
@click.option(
    "-o",
    "--owner",
    "--owner-access-key",
    "owner_access_key",
    metavar="ACCESS_KEY",
    help="Specify the owner of the target session of user dotfiles.",
)
@click.option(
    "-d", "--domain", "domain", metavar="DOMAIN", help="Specify the domain name of domain dotfiles."
)
@click.option(
    "-g",
    "--group",
    metavar="GROUP",
    help=(
        "Specify the group name or id of group dotfiles. "
        "(If group name is provided, domain name must be specified with option -d)"
    ),
)
def METHOD_NAME(path, permission, dotfile_path, owner_access_key, domain, group):
    """
    Update dotfile stored in Backend.AI Manager.
    """
    if dotfile_path:
        with open(dotfile_path, "r") as fr:
            body = fr.read()
    else:
        body = ""
        for line in sys.stdin:
            body += line  # stdin lines already carry their trailing newline
    with Session() as session:
        try:
            if not permission:
                permission = "755"
            dotfile_ = session.Dotfile(
                path, owner_access_key=owner_access_key, domain=domain, group=group
            )
            dotfile_.METHOD_NAME(body, permission)
            print_info(f"Dotfile {dotfile_.path} updated")
        except Exception as e:
            print_error(e)
            sys.exit(ExitCode.FAILURE)


@dotfile.command()
@click.argument("path", metavar="PATH")
@click.option("-f", "--force", type=bool, is_flag=True, help="Delete dotfile without confirmation.")
@click.option(
    "-o",
    "--owner",
    "--owner-access-key",
    "owner_access_key",
    metavar="ACCESS_KEY",
    help="Specify the owner of the target session of user dotfiles.",
)
@click.option(
    "-d", "--domain", "domain", metavar="DOMAIN", help="Specify the domain name of domain dotfiles."
)
@click.option(
    "-g",
    "--group",
    metavar="GROUP",
    help=(
        "Specify the group name or id of group dotfiles. "
        "(If group name is provided, domain name must be specified with option -d)"
    ),
)
def delete(path, force, owner_access_key, domain, group):
    """
    Delete dotfile from Backend.AI Manager.
    """
    with Session() as session:
        dotfile_ = session.Dotfile(
            path, owner_access_key=owner_access_key, domain=domain, group=group
        )
        if not force:
            print_warn("Are you sure? (y/[n])")
            result = input()
            if result.strip() != "y":
                print_info("Aborting.")
                exit()
        try:
            dotfile_.delete()
            print_info(f"Dotfile {dotfile_.path} deleted")
        except Exception as e:
            print_error(e)
            sys.exit(ExitCode.FAILURE)
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# SPDX-FileCopyrightText: 2022 Nicolas Hennion <[email protected]>
#
# SPDX-License-Identifier: LGPL-3.0-only
#

import re

from glances.logger import logger


class GlancesFilter(object):
    """Allow Glances to filter processes

    >>> f = GlancesFilter()
    >>> f.filter = '.*python.*'
    >>> f.filter
    '.*python.*'
    >>> f.filter_key
    None
    >>> f.filter = 'user:nicolargo'
    >>> f.filter
    'nicolargo'
    >>> f.filter_key
    'user'
    >>> f.filter = 'username:.*nico.*'
    >>> f.filter
    '.*nico.*'
    >>> f.filter_key
    'username'
    """

    def __init__(self):
        # Filter entered by the user (string)
        self._filter_input = None
        # Filter to apply
        self._filter = None
        # Filter regular expression
        self._filter_re = None
        # Dict key where the filter should be applied
        # Default is None: search on command line and process name
        self._filter_key = None

    @property
    def filter_input(self):
        """Return the filter given by the user (as a string)"""
        return self._filter_input

    @property
    def filter(self):
        """Return the current filter to be applied"""
        return self._filter

    @filter.setter
    def filter(self, value):
        """Set the filter (as a string) and compute the regular expression

        A filter could be one of the following:
        - python > Process name of cmd start with python
        - .*python.* > Process name of cmd contain python
        - username:nicolargo > Process of nicolargo user
        """
        self._filter_input = value
        if value is None:
            self._filter = None
            self._filter_key = None
        else:
            new_filter = value.split(':')
            if len(new_filter) == 1:
                self._filter = new_filter[0]
                self._filter_key = None
            else:
                self._filter = new_filter[1]
                self._filter_key = new_filter[0]

        self._filter_re = None
        if self.filter is not None:
            logger.info("Set filter to {} on key {}".format(self.filter, self.filter_key))
            # Compute the regular expression
            try:
                self._filter_re = re.compile(self.filter)
                logger.debug("Filter regex compilation OK: {}".format(self.filter))
            except Exception as e:
                logger.error("Cannot compile filter regex: {} ({})".format(self.filter, e))
                self._filter = None
                self._filter_re = None
                self._filter_key = None

    @property
    def filter_re(self):
        """Return the filter regular expression"""
        return self._filter_re

    @property
    def filter_key(self):
        """key where the filter should be applied"""
        return self._filter_key

    def is_filtered(self, process):
        """Return True if the process item match the current filter

        :param process: A dict corresponding to the process item.
        """
        if self.filter is None:
            # No filter => Not filtered
            return False

        if self.filter_key is None:
            # Apply filter on command line and process name
            return self.METHOD_NAME(process, key='name') or self.METHOD_NAME(process, key='cmdline')
        else:
            # Apply filter on <key>
            return self.METHOD_NAME(process)

    def METHOD_NAME(self, process, key=None):
        """Return True if the process[key] should be filtered according to the current filter"""
        if key is None:
            key = self.filter_key
        try:
            # If the item process[key] is a list, convert it to a string
            # in order to match it with the current regular expression
            if isinstance(process[key], list):
                value = ' '.join(process[key])
            else:
                value = process[key]
        except KeyError:
            # If the key did not exist
            return False
        try:
            return self._filter_re.fullmatch(value) is None
        except (AttributeError, TypeError):
            # AttributeError - Filter processes crashes with a bad regular expression pattern (issue #665)
            # TypeError - Filter processes crashes if value is None (issue #1105)
            return False
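
# Usage sketch for the GlancesFilter class above. The import path assumes
# Glances's standard layout (glances/filter.py); the process dict is a
# hypothetical minimal stand-in for a real psutil-derived entry.
from glances.filter import GlancesFilter

f = GlancesFilter()
f.filter = 'username:nico.*'
proc = {'name': 'python3', 'cmdline': ['python3', 'app.py'], 'username': 'nicolargo'}
# is_filtered returns True when the process should be hidden, i.e. when the
# keyed value does NOT fully match the pattern; 'nicolargo' matches 'nico.*'.
print(f.is_filtered(proc))  # False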
""" :meta private: """ from pharmpy.deps import sympy from pharmpy.model import ( Assignment, Compartment, CompartmentalSystem, CompartmentalSystemBuilder, Model, Statements, output, ) from .error import set_proportional_error_model from .odes import add_individual_parameter, set_initial_estimates def add_effect_compartment(model: Model, expr: str): r"""Add an effect compartment. Implemented PD models are: * Baseline: .. math:: E = E_0 * Linear: .. math:: E = E_0 + \text{slope} \cdot C * Emax: .. math:: E = E_0 + \frac {E_{max} \cdot C } { EC_{50} + C } * Step effect: .. math:: E = \Biggl \lbrace {E_0 \quad \text{if C} \leq 0 \atop E_0 + E_{max} \quad \text{else}} * Sigmoidal: .. math:: E=\Biggl \lbrace {E_0+\frac{E_{max} \cdot C^n}{EC_{50}^n+C^n} \quad \text{if C}>0 \atop \ E_0 \quad \text{else}} * Log-linear: .. math:: E = \text{slope} \cdot \text{log}(C + C_0) Parameters ---------- model : Model Pharmpy model expr : str Name of the PD effect function. Valid names are: baseline, linear, Emax, sigmoid, step, loglin Return ------ Model Pharmpy model object Examples -------- >>> from pharmpy.modeling import * >>> model = load_example_model("pheno") >>> model = add_effect_compartment(model, "linear") >>> model.statements.ode_system.find_compartment("EFFECT") Compartment(EFFECT, amount=A_EFFECT, input=KE0*A_CENTRAL(t)/V) """ vc, cl = _get_central_volume_and_cl(model) odes = model.statements.ode_system central = odes.central_compartment central_amount = sympy.Function(central.amount.name)(sympy.Symbol('t')) cb = CompartmentalSystemBuilder(odes) ke0 = sympy.Symbol("KE0") model = add_individual_parameter(model, ke0.name) effect = Compartment.create("EFFECT", input=ke0 * central_amount / vc) cb.add_compartment(effect) cb.add_flow(effect, output, ke0) model = model.replace( statements=Statements( model.statements.before_odes + CompartmentalSystem(cb) + model.statements.after_odes ) ) conc_e = model.statements.ode_system.find_compartment("EFFECT").amount model = METHOD_NAME(model, expr, conc_e) return model def set_direct_effect(model: Model, expr: str): r"""Add an effect to a model. Implemented PD models are: * Baseline: .. math:: E = E_0 * Linear: .. math:: E = E_0 + \text{slope} \cdot C * Emax: .. math:: E = E_0 + \frac {E_{max} \cdot C } { EC_{50} + C } * Step effect: .. math:: E = \Biggl \lbrace {E_0 \quad \text{if C} \leq 0 \atop E_0 + E_{max} \quad \text{else}} * Sigmoidal: .. math:: E=\Biggl \lbrace {E_0+\frac{E_{max} \cdot C^n}{EC_{50}^n+C^n} \quad \text{if C}>0 \atop \ E_0 \quad \text{else}} * Log-linear: .. math:: E = \text{slope} \cdot \text{log}(C + C_0) Parameters ---------- model : Model Pharmpy model expr : str Name of PD effect function. 
Valid names are: baseline, linear, Emax, sigmoid, step, loglin Return ------ Model Pharmpy model object Examples -------- >>> from pharmpy.modeling import * >>> model = load_example_model("pheno") >>> model = set_direct_effect(model, "linear") >>> model.statements.find_assignment("E") A_CENTRAL⋅SLOPE ─────────────── + E₀ E = V """ vc, cl = _get_central_volume_and_cl(model) conc = model.statements.ode_system.central_compartment.amount / vc model = METHOD_NAME(model, expr, conc) return model def _get_central_volume_and_cl(model): odes = model.statements.ode_system central_comp = odes.central_compartment elimination_rate = odes.get_flow(central_comp, output) numer, denom = elimination_rate.as_numer_denom() if denom != 1: vc = denom cl = numer else: raise ValueError('Model is not suitable') return vc, cl def METHOD_NAME(model: Model, expr: str, conc): e0 = sympy.Symbol("E0") model = add_individual_parameter(model, e0.name) if expr in ["Emax", "sigmoid", "step"]: emax = sympy.Symbol("E_MAX") model = add_individual_parameter(model, emax.name) if expr in ["Emax", "sigmoid"]: ec50 = sympy.Symbol("EC_50") model = add_individual_parameter(model, ec50.name) # Add effect E if expr == "baseline": E = Assignment(sympy.Symbol('E'), e0) elif expr == "linear": s = sympy.Symbol("SLOPE") model = add_individual_parameter(model, s.name) E = Assignment(sympy.Symbol('E'), e0 + s * conc) elif expr == "Emax": E = Assignment(sympy.Symbol("E"), e0 + emax * conc / (ec50 + conc)) elif expr == "step": E = Assignment(sympy.Symbol("E"), sympy.Piecewise((e0, conc <= 0), (e0 + emax, True))) elif expr == "sigmoid": n = sympy.Symbol("n") # Hill coefficient model = add_individual_parameter(model, n.name) model = set_initial_estimates(model, {"POP_n": 1}) E = Assignment( sympy.Symbol("E"), sympy.Piecewise( ((e0 + emax * conc**n / (ec50**n + conc**n)), conc > 0), (e0, True) ), ) elif expr == "loglin": s = sympy.Symbol("SLOPE") model = add_individual_parameter(model, s.name) E = Assignment(sympy.Symbol("E"), s * sympy.log(conc + sympy.exp(e0 / s))) else: raise ValueError(f'Unknown model "{expr}".') # Add dependent variable Y_2 y_2 = sympy.Symbol('Y_2') y = Assignment(y_2, E.symbol) dvs = model.dependent_variables.replace(y_2, 2) model = model.replace(statements=model.statements + E + y, dependent_variables=dvs) # Add error model model = set_proportional_error_model(model, dv=2, zero_protection=False) return model
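
# A standalone sympy sketch of the Emax relationship used by the "Emax" branch
# above (not pharmpy's internal representation; the symbol names follow the
# parameters the helper adds: E0, E_MAX, EC_50).
import sympy

E0, Emax, EC50, C = sympy.symbols('E0 E_MAX EC_50 C', positive=True)
E = E0 + Emax * C / (EC50 + C)

# Sanity check: at C = EC50 the effect sits halfway to its maximum.
print(sympy.simplify(E.subs(C, EC50) - E0))  # E_MAX/2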
#!/usr/bin/python
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.4+

import os
import os.path
import sys
import imp
import base64
import re
import json
import platform
import shutil
import time
import traceback
import datetime
import subprocess

from redhatPatching import redhatPatching
from Common import *
from CommandExecutor import *


class centosPatching(redhatPatching):
    def __init__(self, logger, distro_info):
        super(centosPatching, self).__init__(logger, distro_info)
        self.logger = logger
        self.command_executor = CommandExecutor(logger)
        if distro_info[1] in ["6.9", "6.8", "6.7", "6.6", "6.5"]:
            self.base64_path = '/usr/bin/base64'
            self.bash_path = '/bin/bash'
            self.blkid_path = '/sbin/blkid'
            self.cat_path = '/bin/cat'
            self.cryptsetup_path = '/sbin/cryptsetup'
            self.dd_path = '/bin/dd'
            self.e2fsck_path = '/sbin/e2fsck'
            self.echo_path = '/bin/echo'
            self.lsblk_path = '/bin/lsblk'
            self.lsscsi_path = '/usr/bin/lsscsi'
            self.mkdir_path = '/bin/mkdir'
            self.mount_path = '/bin/mount'
            self.openssl_path = '/usr/bin/openssl'
            self.resize2fs_path = '/sbin/resize2fs'
            self.umount_path = '/bin/umount'
        else:
            self.base64_path = '/usr/bin/base64'
            self.bash_path = '/usr/bin/bash'
            self.blkid_path = '/usr/bin/blkid'
            self.cat_path = '/bin/cat'
            self.cryptsetup_path = '/usr/sbin/cryptsetup'
            self.dd_path = '/usr/bin/dd'
            self.e2fsck_path = '/sbin/e2fsck'
            self.echo_path = '/usr/bin/echo'
            self.lsblk_path = '/usr/bin/lsblk'
            self.lsscsi_path = '/usr/bin/lsscsi'
            self.mkdir_path = '/usr/bin/mkdir'
            self.mount_path = '/usr/bin/mount'
            self.openssl_path = '/usr/bin/openssl'
            self.resize2fs_path = '/sbin/resize2fs'
            self.umount_path = '/usr/bin/umount'

    def install_adal(self):
        # epel-release and python-pip >= version 8.1 are adal prerequisites
        # https://github.com/AzureAD/azure-activedirectory-library-for-python/
        self.command_executor.Execute("yum install -y epel-release")
        self.command_executor.Execute("yum install -y python-pip")
        self.command_executor.Execute("python -m pip install --upgrade pip")
        self.command_executor.Execute("python -m pip install adal")

    def install_extras(self):
        packages = ['cryptsetup',
                    'lsscsi',
                    'psmisc',
                    'cryptsetup-reencrypt',
                    'lvm2',
                    'uuid',
                    'at',
                    'patch',
                    'procps-ng',
                    'util-linux',
                    'pyparted']

        if self.distro_info[1].startswith("6."):
            packages.append('python-six')  # lists have append, not add
            packages.remove('cryptsetup')
            packages.remove('procps-ng')
            packages.remove('util-linux')

        if self.command_executor.Execute("rpm -q " + " ".join(packages)):
            self.command_executor.Execute("yum install -y " + " ".join(packages))

    def METHOD_NAME(self):
        if (self.distro_info[1].startswith('7.')):
            dracut_repack_needed = False

            if os.path.exists("/lib/dracut/modules.d/91lvm/"):
                # If 90lvm already exists 91lvm will cause problems, so remove it.
                if os.path.exists("/lib/dracut/modules.d/90lvm/"):
                    shutil.rmtree("/lib/dracut/modules.d/91lvm/")
                else:
                    os.rename("/lib/dracut/modules.d/91lvm/", "/lib/dracut/modules.d/90lvm/")
                dracut_repack_needed = True

            if redhatPatching.is_old_patching_system():
                redhatPatching.remove_old_patching_system(self.logger, self.command_executor)
                dracut_repack_needed = True

            if os.path.exists("/lib/dracut/modules.d/91ade/"):
                shutil.rmtree("/lib/dracut/modules.d/91ade/")
                dracut_repack_needed = True

            if os.path.exists("/dev/mapper/osencrypt"):
                # TODO: only do this if needed (if code and existing module are different)
                redhatPatching.add_91_ade_dracut_module(self.command_executor)
                dracut_repack_needed = True

            if dracut_repack_needed:
                self.command_executor.ExecuteInBash("/usr/sbin/dracut -f -v --kver `grubby --default-kernel | sed 's|/boot/vmlinuz-||g'`", True)
import logging
from decimal import Decimal
from functools import lru_cache
from typing import Dict, List, Optional, Set

from hummingbot.client.config.client_config_map import ClientConfigMap
from hummingbot.client.config.config_helpers import ReadOnlyClientConfigAdapter, get_connector_class
from hummingbot.client.config.security import Security
from hummingbot.client.settings import AllConnectorSettings, GatewayConnectionSetting, gateway_connector_trading_pairs
from hummingbot.core.utils.async_utils import safe_gather
from hummingbot.core.utils.gateway_config_utils import flatten
from hummingbot.core.utils.market_price import get_last_price


class UserBalances:
    __instance = None

    @staticmethod
    def connect_market(exchange, client_config_map: ClientConfigMap, **api_details):
        connector = None
        conn_setting = AllConnectorSettings.get_connector_settings()[exchange]
        if api_details or conn_setting.uses_gateway_generic_connector():
            connector_class = get_connector_class(exchange)
            read_only_client_config = ReadOnlyClientConfigAdapter.lock_config(client_config_map)
            init_params = conn_setting.conn_init_parameters(
                trading_pairs=gateway_connector_trading_pairs(conn_setting.name),
                api_keys=api_details,
                client_config_map=read_only_client_config,
            )
            # collect trading pairs from the gateway connector settings
            trading_pairs: List[str] = gateway_connector_trading_pairs(conn_setting.name)
            # collect unique trading pairs that are for balance reporting only
            if conn_setting.uses_gateway_generic_connector():
                config: Optional[Dict[str, str]] = GatewayConnectionSetting.get_connector_spec_from_market_name(conn_setting.name)
                if config is not None:
                    existing_pairs = set(flatten([x.split("-") for x in trading_pairs]))
                    other_tokens: Set[str] = set(config.get("tokens", "").split(","))
                    other_tokens.discard("")
                    tokens: List[str] = [t for t in other_tokens if t not in existing_pairs]
                    if tokens != [""]:
                        trading_pairs.append("-".join(tokens))
            connector = connector_class(**init_params)
        return connector

    # return error message if the _update_balances fails
    @staticmethod
    async def _update_balances(market) -> Optional[str]:
        try:
            await market._update_balances()
        except Exception as e:
            logging.getLogger().debug(f"Failed to update balances for {market}", exc_info=True)
            return str(e)
        return None

    @staticmethod
    def instance():
        if UserBalances.__instance is None:
            UserBalances()
        return UserBalances.__instance

    @staticmethod
    @lru_cache(maxsize=10)
    def is_gateway_market(exchange_name: str) -> bool:
        return (
            exchange_name
            in sorted(
                AllConnectorSettings.get_gateway_amm_connector_names().union(
                    AllConnectorSettings.get_gateway_evm_amm_lp_connector_names()
                ).union(
                    AllConnectorSettings.get_gateway_clob_connector_names()
                )
            )
        )

    def __init__(self):
        if UserBalances.__instance is not None:
            raise Exception("This class is a singleton!")
        else:
            UserBalances.__instance = self
        self._markets = {}

    async def add_exchange(self, exchange, client_config_map: ClientConfigMap, **api_details) -> Optional[str]:
        self._markets.pop(exchange, None)
        market = UserBalances.connect_market(exchange, client_config_map, **api_details)
        if not market:
            return "API keys have not been added."
        err_msg = await UserBalances._update_balances(market)
        if err_msg is None:
            self._markets[exchange] = market
        return err_msg

    def all_balances(self, exchange) -> Dict[str, Decimal]:
        if exchange not in self._markets:
            return {}
        return self._markets[exchange].get_all_balances()

    async def update_exchange_balance(self, exchange_name: str, client_config_map: ClientConfigMap) -> Optional[str]:
        is_gateway_market = self.is_gateway_market(exchange_name)
        if is_gateway_market and exchange_name in self._markets:
            # we want to refresh gateway connectors always, since the applicable tokens change over time.
            # doing this will reinitialize and fetch balances for active trading pair
            del self._markets[exchange_name]
        if exchange_name in self._markets:
            return await self._update_balances(self._markets[exchange_name])
        else:
            await Security.wait_til_decryption_done()
            api_keys = Security.api_keys(exchange_name) if not is_gateway_market else {}
            return await self.add_exchange(exchange_name, client_config_map, **api_keys)

    # returns error message for each exchange
    async def update_exchanges(
        self,
        client_config_map: ClientConfigMap,
        reconnect: bool = False,
        exchanges: Optional[List[str]] = None,
    ) -> Dict[str, Optional[str]]:
        exchanges = exchanges or []
        tasks = []
        # Update user balances
        if len(exchanges) == 0:
            exchanges = [cs.name for cs in AllConnectorSettings.get_connector_settings().values()]
        exchanges: List[str] = [
            cs.name
            for cs in AllConnectorSettings.get_connector_settings().values()
            if not cs.use_ethereum_wallet and cs.name in exchanges and not cs.name.endswith("paper_trade")
        ]

        if reconnect:
            self._markets.clear()
        for exchange in exchanges:
            tasks.append(self.update_exchange_balance(exchange, client_config_map))
        results = await safe_gather(*tasks)
        return {ex: err_msg for ex, err_msg in zip(exchanges, results)}

    async def all_balances_all_exchanges(self, client_config_map: ClientConfigMap) -> Dict[str, Dict[str, Decimal]]:
        await self.update_exchanges(client_config_map)
        return {k: v.get_all_balances() for k, v in sorted(self._markets.items(), key=lambda x: x[0])}

    def METHOD_NAME(self) -> Dict[str, Dict[str, Decimal]]:
        return {k: v.available_balances for k, v in sorted(self._markets.items(), key=lambda x: x[0])}

    async def balances(self, exchange, client_config_map: ClientConfigMap, *symbols) -> Dict[str, Decimal]:
        if await self.update_exchange_balance(exchange, client_config_map) is None:
            results = {}
            for token, bal in self.all_balances(exchange).items():
                matches = [s for s in symbols if s.lower() == token.lower()]
                if matches:
                    results[matches[0]] = bal
            return results

    @staticmethod
    def validate_ethereum_wallet() -> Optional[str]:
        return "Connector deprecated."

    @staticmethod
    async def base_amount_ratio(exchange, trading_pair, balances) -> Optional[Decimal]:
        try:
            base, quote = trading_pair.split("-")
            base_amount = balances.get(base, 0)
            quote_amount = balances.get(quote, 0)
            price = await get_last_price(exchange, trading_pair)
            total_value = base_amount + (quote_amount / price)
            return None if total_value <= 0 else base_amount / total_value
        except Exception:
            return None
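
# The arithmetic inside base_amount_ratio above, as a standalone Decimal sketch
# with made-up balances and a made-up last price (the real method fetches the
# price via get_last_price).
from decimal import Decimal

base_amount = Decimal('0.5')      # e.g. BTC held
quote_amount = Decimal('10000')   # e.g. USDT held
price = Decimal('20000')          # USDT per BTC

total_value = base_amount + quote_amount / price  # portfolio valued in base units
print(base_amount / total_value)  # 0.5 -> holdings evenly split between base and quote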
# Copyright (c) ZenML GmbH 2022. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#       https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Amazon S3 artifact store flavor."""

import json
import re
from typing import (
    TYPE_CHECKING,
    Any,
    ClassVar,
    Dict,
    Optional,
    Set,
    Type,
    Union,
)

from pydantic import validator

from zenml.artifact_stores import (
    BaseArtifactStoreConfig,
    BaseArtifactStoreFlavor,
)
from zenml.integrations.s3 import S3_ARTIFACT_STORE_FLAVOR
from zenml.models import ServiceConnectorRequirements
from zenml.stack.authentication_mixin import AuthenticationConfigMixin
from zenml.utils.networking_utils import (
    replace_localhost_with_internal_hostname,
)
from zenml.utils.secret_utils import SecretField

if TYPE_CHECKING:
    from zenml.integrations.s3.artifact_stores import S3ArtifactStore


class S3ArtifactStoreConfig(
    BaseArtifactStoreConfig, AuthenticationConfigMixin
):
    """Configuration for the S3 Artifact Store.

    All attributes of this class except `path` will be passed to the
    `s3fs.S3FileSystem` initialization. See
    [here](https://s3fs.readthedocs.io/en/latest/) for more information on
    how to use those configuration options to connect to any S3-compatible
    storage.

    When you want to register an S3ArtifactStore from the CLI and need to
    pass `client_kwargs`, `config_kwargs` or `s3_additional_kwargs`, you
    should pass them as a json string:
    ```
    zenml artifact-store register my_s3_store --flavor=s3 \
    --path=s3://my_bucket --client_kwargs='{"endpoint_url": "http://my-s3-endpoint"}'
    ```
    """

    SUPPORTED_SCHEMES: ClassVar[Set[str]] = {"s3://"}

    key: Optional[str] = SecretField()
    secret: Optional[str] = SecretField()
    token: Optional[str] = SecretField()
    client_kwargs: Optional[Dict[str, Any]] = None
    config_kwargs: Optional[Dict[str, Any]] = None
    s3_additional_kwargs: Optional[Dict[str, Any]] = None

    @validator(
        "client_kwargs", "config_kwargs", "s3_additional_kwargs", pre=True
    )
    def _convert_json_string(
        cls, value: Union[None, str, Dict[str, Any]]
    ) -> Optional[Dict[str, Any]]:
        """Converts potential JSON strings passed via the CLI to dictionaries.

        Args:
            value: The value to convert.

        Returns:
            The converted value.

        Raises:
            TypeError: If the value is not a `str`, `Dict` or `None`.
            ValueError: If the value is an invalid json string or a json
                string that does not decode into a dictionary.
        """
        if isinstance(value, str):
            try:
                dict_ = json.loads(value)
            except json.JSONDecodeError as e:
                raise ValueError(f"Invalid json string '{value}'") from e

            if not isinstance(dict_, Dict):
                raise ValueError(
                    f"Json string '{value}' did not decode into a dictionary."
                )

            return dict_
        elif isinstance(value, Dict) or value is None:
            return value
        else:
            raise TypeError(f"{value} is not a json string or a dictionary.")

    @validator("client_kwargs")
    def _validate_client_kwargs(
        cls, value: Optional[Dict[str, Any]]
    ) -> Optional[Dict[str, Any]]:
        """Validates the `client_kwargs` attribute.

        Args:
            value: The value to validate.

        Raises:
            ValueError: If the value is not a valid URL.

        Returns:
            The validated value.
        """
        if value is None:
            return value

        if "endpoint_url" in value and value["endpoint_url"]:
            url = value["endpoint_url"].rstrip("/")
            scheme = re.search("^([a-z0-9]+://)", url)
            if scheme is None or scheme.group() not in ("https://", "http://"):
                raise ValueError(
                    f"Invalid URL for endpoint url: {url}. Should be in the form "
                    "https://hostname[:port] or http://hostname[:port]."
                )

            # When running inside a container, if the URL uses localhost, the
            # target service will not be available. We try to replace localhost
            # with one of the special Docker or K3D internal hostnames.
            value["endpoint_url"] = replace_localhost_with_internal_hostname(
                url
            )
        return value


class S3ArtifactStoreFlavor(BaseArtifactStoreFlavor):
    """Flavor of the S3 artifact store."""

    @property
    def name(self) -> str:
        """Name of the flavor.

        Returns:
            The name of the flavor.
        """
        return S3_ARTIFACT_STORE_FLAVOR

    @property
    def service_connector_requirements(
        self,
    ) -> Optional[ServiceConnectorRequirements]:
        """Service connector resource requirements for service connectors.

        Specifies resource requirements that are used to filter the available
        service connector types that are compatible with this flavor.

        Returns:
            Requirements for compatible service connectors, if a service
            connector is required for this flavor.
        """
        return ServiceConnectorRequirements(
            resource_type="s3-bucket",
            resource_id_attr="path",
        )

    @property
    def METHOD_NAME(self) -> Optional[str]:
        """A url to point at docs explaining this flavor.

        Returns:
            A flavor docs url.
        """
        return self.generate_default_docs_url()

    @property
    def sdk_docs_url(self) -> Optional[str]:
        """A url to point at SDK docs explaining this flavor.

        Returns:
            A flavor SDK docs url.
        """
        return self.generate_default_sdk_docs_url()

    @property
    def logo_url(self) -> str:
        """A url to represent the flavor in the dashboard.

        Returns:
            The flavor logo.
        """
        return "https://public-flavor-logos.s3.eu-central-1.amazonaws.com/artifact_store/aws.png"

    @property
    def config_class(self) -> Type[S3ArtifactStoreConfig]:
        """The config class of the flavor.

        Returns:
            The config class of the flavor.
        """
        return S3ArtifactStoreConfig

    @property
    def implementation_class(self) -> Type["S3ArtifactStore"]:
        """Implementation class for this flavor.

        Returns:
            The implementation class for this flavor.
        """
        from zenml.integrations.s3.artifact_stores import S3ArtifactStore

        return S3ArtifactStore
import unittest

import torch
import torch.nn as nn
import torchvision

import lightly
from lightly.models import ResNetGenerator, SimCLR


def get_backbone(resnet, num_ftrs=64):
    last_conv_channels = list(resnet.children())[-1].in_features
    backbone = nn.Sequential(
        lightly.models.batchnorm.get_norm_layer(3, 0),
        *list(resnet.children())[:-1],
        nn.Conv2d(last_conv_channels, num_ftrs, 1),
        nn.AdaptiveAvgPool2d(1),
    )
    return backbone


class TestModelsSimCLR(unittest.TestCase):
    def METHOD_NAME(self):
        self.resnet_variants = ["resnet-18", "resnet-50"]
        self.batch_size = 2
        self.input_tensor = torch.rand((self.batch_size, 3, 32, 32))

    def test_create_variations_cpu(self):
        for model_name in self.resnet_variants:
            resnet = ResNetGenerator(model_name)
            model = SimCLR(get_backbone(resnet))
            self.assertIsNotNone(model)

    def test_create_variations_gpu(self):
        device = "cuda" if torch.cuda.is_available() else "cpu"
        if device == "cuda":
            for model_name in self.resnet_variants:
                resnet = ResNetGenerator(model_name)
                model = SimCLR(get_backbone(resnet)).to(device)
                self.assertIsNotNone(model)
        else:
            pass

    def test_feature_dim_configurable(self):
        device = "cuda" if torch.cuda.is_available() else "cpu"
        for model_name in self.resnet_variants:
            for num_ftrs, out_dim in zip([16, 64], [64, 256]):
                resnet = ResNetGenerator(model_name)
                model = SimCLR(
                    get_backbone(resnet, num_ftrs=num_ftrs),
                    num_ftrs=num_ftrs,
                    out_dim=out_dim,
                ).to(device)

                # check that feature vector has correct dimension
                with torch.no_grad():
                    out_features = model.backbone(self.input_tensor.to(device))
                self.assertEqual(out_features.shape[1], num_ftrs)

                # check that projection head output has right dimension
                with torch.no_grad():
                    out_projection = model.projection_head(out_features.squeeze())
                self.assertEqual(out_projection.shape[1], out_dim)
                self.assertIsNotNone(model)

    def test_variations_input_dimension(self):
        device = "cuda" if torch.cuda.is_available() else "cpu"
        for model_name in self.resnet_variants:
            for input_width, input_height in zip([32, 64], [64, 64]):
                resnet = ResNetGenerator(model_name)
                model = SimCLR(get_backbone(resnet, num_ftrs=32)).to(device)

                input_tensor = torch.rand(
                    (self.batch_size, 3, input_height, input_width)
                )
                with torch.no_grad():
                    out = model(input_tensor.to(device))
                self.assertIsNotNone(model)
                self.assertIsNotNone(out)

    def test_tuple_input(self):
        device = "cuda" if torch.cuda.is_available() else "cpu"
        resnet = ResNetGenerator("resnet-18")
        model = SimCLR(get_backbone(resnet, num_ftrs=32), out_dim=128).to(device)

        x0 = torch.rand((self.batch_size, 3, 64, 64)).to(device)
        x1 = torch.rand((self.batch_size, 3, 64, 64)).to(device)

        out = model(x0)
        self.assertEqual(out.shape, (self.batch_size, 128))

        out, features = model(x0, return_features=True)
        self.assertEqual(out.shape, (self.batch_size, 128))
        self.assertEqual(features.shape, (self.batch_size, 32))

        out0, out1 = model(x0, x1)
        self.assertEqual(out0.shape, (self.batch_size, 128))
        self.assertEqual(out1.shape, (self.batch_size, 128))

        (out0, f0), (out1, f1) = model(x0, x1, return_features=True)
        self.assertEqual(out0.shape, (self.batch_size, 128))
        self.assertEqual(out1.shape, (self.batch_size, 128))
        self.assertEqual(f0.shape, (self.batch_size, 32))
        self.assertEqual(f1.shape, (self.batch_size, 32))


if __name__ == "__main__":
    unittest.main()
# coding=utf-8
# Copyright 2023 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=line-too-long
r"""ViT-B/16 finetuning on CIFAR 10 and CIFAR 100 using hypersweep.
"""
# pylint: enable=line-too-long

import ml_collections


def METHOD_NAME():
  """Config for training a patch-transformer on JFT."""
  config = ml_collections.ConfigDict()

  # Fine-tuning dataset
  config.dataset = 'cifar10'
  config.val_split = 'train[98%:]'
  config.train_split = 'train[:98%]'
  config.num_classes = 10

  BATCH_SIZE = 512  # pylint: disable=invalid-name
  config.batch_size = BATCH_SIZE

  config.total_steps = 10_000

  INPUT_RES = 384  # pylint: disable=invalid-name
  pp_common = '|value_range(-1, 1)'
  # pp_common += f'|onehot({config.num_classes})'
  # To use ancestor 'smearing', use this line instead:
  pp_common += f'|onehot({config.num_classes}, key="label", key_result="labels")'  # pylint: disable=line-too-long
  pp_common += '|keep(["image", "labels"])'
  config.pp_train = f'decode|inception_crop({INPUT_RES})|flip_lr' + pp_common
  config.pp_eval = f'decode|resize({INPUT_RES})' + pp_common

  # OOD eval
  # ood_split is the data split for both the ood_dataset and the dataset.
  config.ood_datasets = ['cifar100', 'svhn_cropped']
  config.ood_num_classes = [100, 10]
  config.ood_split = 'test'
  config.ood_methods = ['msp', 'entropy', 'maha', 'rmaha']
  config.pp_eval_ood = []

  # CIFAR-10H eval
  config.eval_on_cifar_10h = False
  config.pp_eval_cifar_10h = ''

  config.shuffle_buffer_size = 50_000  # Per host, so small-ish is ok.

  config.log_training_steps = 1
  # NOTE: eval is very fast O(seconds) so it's fine to run it often.
  config.checkpoint_steps = 1000
  config.checkpoint_timeout = 1

  config.prefetch_to_device = 2
  config.trial = 0

  # Model section
  # pre-trained model ckpt file
  # !!!  The below section should be modified per experiment
  config.model_init = '/path/to/pretrained_model_ckpt.npz'

  # Model definition to be copied from the pre-training config
  config.model = ml_collections.ConfigDict()
  config.model.patches = ml_collections.ConfigDict()
  config.model.patches.size = [16, 16]
  config.model.hidden_size = 768
  config.model.transformer = ml_collections.ConfigDict()
  config.model.transformer.attention_dropout_rate = 0.
  config.model.transformer.dropout_rate = 0.
  config.model.transformer.mlp_dim = 3072
  config.model.transformer.num_heads = 12
  config.model.transformer.num_layers = 12
  config.model.classifier = 'token'  # Or 'gap'

  # This is "no head" fine-tuning, which we use by default
  config.model.representation_size = None

  # Gaussian process layer section
  config.gp_layer = ml_collections.ConfigDict()
  config.gp_layer.ridge_penalty = 1.
  # Disable momentum in order to use exact covariance update for finetuning.
  config.gp_layer.covmat_momentum = -1.
  config.gp_layer.mean_field_factor = 20.

  # Optimizer section
  config.optim_name = 'Momentum'
  config.optim = ml_collections.ConfigDict()
  config.grad_clip_norm = -1.
  config.weight_decay = None  # No explicit weight decay
  config.loss = 'softmax_xent'

  config.lr = ml_collections.ConfigDict()
  config.lr.base = 0.001
  config.lr.warmup_steps = 500
  config.lr.decay_type = 'cosine'
  return config


def get_sweep(hyper):
  """Sweeps over datasets."""
  # pylint: disable=g-long-lambda
  c100 = lambda **kw: task(
      hyper, 'cifar100', 'train[:98%]', 'train[98%:]',
      ['cifar10', 'svhn_cropped'], [10, 10], n_cls=100, **kw)
  c10 = lambda **kw: task(
      hyper, 'cifar10', 'train[:98%]', 'train[98%:]',
      ['cifar100', 'svhn_cropped'], [100, 10], n_cls=10, **kw)
  # pylint: enable=g-long-lambda
  tasks = hyper.chainit([
      # Same sizes as in default BiT-HyperRule, for models that support hi-res.
      c100(size=384, steps=10_000, warmup=500),
      c100(size=384, steps=10_000, warmup=500, train_data_aug=False),
      c10(size=384, steps=10_000, warmup=500),
      c10(size=384, steps=10_000, warmup=500, train_data_aug=False),
  ])
  model_init = [MODEL_INIT_I21K_VIT, MODEL_INIT_JFT_VIT,
                MODEL_INIT_I21K_VIT_GP, MODEL_INIT_JFT_VIT_GP]
  lr_grid = [1e-4, 3e-4, 5e-4, 1e-3, 3e-3, 5e-3, 1e-2]
  clip_grid = [-1.]
  mf_grid = [-1., 0.1, 0.15, 0.2, 0.25, 0.3, 0.5]
  return hyper.product([
      hyper.sweep('config.model_init', model_init),
      tasks,
      hyper.sweep('config.lr.base', lr_grid),
      hyper.sweep('config.grad_clip_norm', clip_grid),
      hyper.sweep('config.gp_layer.mean_field_factor', mf_grid),
  ])


def fixed(hyper, **kw):
  return hyper.zipit(
      [hyper.fixed(f'config.{k}', v, length=1) for k, v in kw.items()])


def task(hyper,
         name,
         train,
         val,
         ood_name,
         ood_num_classes,
         n_cls,
         steps,
         warmup,
         size,
         train_data_aug=True):
  """Vision task with val and test splits."""
  common = '|value_range(-1, 1)'
  common += f'|onehot({n_cls}, key="label", key_result="labels")'
  common += '|keep(["image", "labels"])'
  pp_eval = f'decode|resize({size})' + common
  if train_data_aug:
    pp_train = f'decode|inception_crop({size})|flip_lr' + common
  else:
    pp_train = f'decode|resize({size})' + common
  pp_eval_ood = []
  for num_classes in ood_num_classes:
    if num_classes > n_cls:
      # Note that evaluation_fn ignores the entries with all zero labels for
      # evaluation. When num_classes > n_cls, we should use onehot{num_classes},
      # otherwise the labels that are greater than n_cls will be encoded with
      # all zeros and then be ignored.
      pp_eval_ood.append(
          pp_eval.replace(f'onehot({n_cls}', f'onehot({num_classes}'))
    else:
      pp_eval_ood.append(pp_eval)
  task_hyper = {
      'dataset': name,
      'train_split': train,
      'pp_train': pp_train,
      'val_split': val,
      'ood_datasets': ood_name,
      'ood_num_classes': ood_num_classes,
      'pp_eval': pp_eval,
      'pp_eval_ood': pp_eval_ood,
      'num_classes': n_cls,
      'lr.warmup_steps': warmup,
      'total_steps': steps,
  }
  if name == 'cifar10':
    # CIFAR-10H eval
    eval_on_cifar_10h = True
    pp_eval_cifar_10h = f'decode|resize({size})|value_range(-1, 1)|keep(["image", "labels"])'
    task_hyper.update({
        'eval_on_cifar_10h': eval_on_cifar_10h,
        'pp_eval_cifar_10h': pp_eval_cifar_10h
    })
  return fixed(hyper, **task_hyper)
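
# The sweep above is a cartesian product over model checkpoints, tasks, and
# hyperparameter grids. A standalone itertools sketch of the same expansion
# for just the two numeric axes (the hyper.* helpers are sweep infrastructure
# not shown in this file):
import itertools

lr_grid = [1e-4, 3e-4, 5e-4, 1e-3, 3e-3, 5e-3, 1e-2]
mf_grid = [-1., 0.1, 0.15, 0.2, 0.25, 0.3, 0.5]

trials = list(itertools.product(lr_grid, mf_grid))
print(len(trials))  # 49 (lr, mean_field_factor) combinations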
# Copyright 2017-2023 Posit Software, PBC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import errno
import functools
import logging
import os
import shutil
import tempfile

from guild import config
from guild import run as runlib
from guild import util

log = logging.getLogger("guild")


def path(*names):
    names = [name for name in names if name]
    return os.path.join(config.guild_home(), *names)


def runs_dir(deleted=False):
    if deleted:
        return trash_dir("runs")
    return path("runs")


def trash_dir(name=None):
    return path("trash", name)


def cache_dir(name=None):
    return path("cache", name)


def pidfile(name):
    return path("proc", name)


def logfile(name):
    return path("log", name)


def remote_dir(name=None):
    # Use directory containing user config to store remote info.
    rest_path = [name] if name else []
    config_path = config.user_config_path()
    if config_path:
        return os.path.join(os.path.dirname(config_path), "remotes", *rest_path)
    return path("remotes", name)


def runs(root=None, sort=None, filter=None, force_root=False, base_runs=None):
    filter = filter or (lambda _: True)
    all_runs = (
        _all_runs_f(root, force_root) if base_runs is None
        else lambda: base_runs
    )
    runs = [run for run in all_runs() if filter(run)]
    if sort:
        runs = sorted(runs, key=_run_sort_key(sort))
    return runs


def _all_runs_f(root, force_root):
    root = root or runs_dir()
    if force_root:
        return _default_all_runs_f(root)
    return util.find_apply(
        [
            _zipfile_all_runs_f,
            _runs_under_parent_f,
            _default_all_runs_f,
        ],
        root,
    )


def _default_all_runs_f(root):
    return lambda: _all_runs(root)


def _zipfile_all_runs_f(root):
    if not root or not root.lower().endswith(".zip"):
        return None
    from . import run_zip_proxy

    def f():
        try:
            return run_zip_proxy.all_runs(root)
        except Exception as e:
            if log.getEffectiveLevel() <= logging.DEBUG:
                log.exception("getting runs for zip file %s", root)
            log.error("cannot read from %s: %s", root, e)
            return []

    return f


def _runs_under_parent_f(root):
    runs_parent = os.getenv("GUILD_RUNS_PARENT")
    if not runs_parent:
        return None
    log.debug("limiting to runs under parent %s", runs_parent)
    return lambda: _runs_for_parent(runs_parent, root)


def _runs_for_parent(parent, root):
    parent_path = os.path.join(root, parent)
    try:
        names = os.listdir(parent_path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        return []
    else:
        return _runs_for_parent_links(parent_path, names, root)


def _runs_for_parent_links(parent_path, names, runs_dir):
    real_paths = [util.realpath(os.path.join(parent_path, name)) for name in names]
    return [
        runlib.for_dir(path)
        for path in real_paths
        if _is_parent_run_path(path, runs_dir)
    ]


def _is_parent_run_path(path, runs_dir):
    return util.compare_paths(os.path.dirname(path), runs_dir)


def run_filter(name, *args):
    if name.startswith("!"):
        name = name[1:]
        maybe_negate = lambda f: lambda r: not f(r)
    else:
        maybe_negate = lambda f: f
    if name == "true":
        filter = lambda _: True
    elif name == "attr":
        name, expected = args
        filter = lambda r: _run_attr(r, name) == expected
    elif name == "all":
        (filters,) = args
        filter = lambda r: all((f(r) for f in filters))
    elif name == "any":
        (filters,) = args
        filter = lambda r: any((f(r) for f in filters))
    else:
        raise ValueError(name)
    return maybe_negate(filter)


def _all_runs(root):
    return [runlib.Run(name, path) for name, path in _iter_dirs(root)]


def METHOD_NAME(root=None):
    return _iter_dirs(root or runs_dir())


def _iter_dirs(root):
    try:
        names = os.listdir(root)
    except OSError:
        names = []
    for name in names:
        path = os.path.join(root, name)
        if _opref_exists(path):
            yield name, path


def _opref_exists(run_dir):
    opref_path = os.path.join(run_dir, ".guild", "opref")
    return os.path.exists(opref_path)


def _run_sort_key(sort):
    return functools.cmp_to_key(lambda x, y: _run_cmp(x, y, sort))


def _run_cmp(x, y, sort):
    for attr in sort:
        attr_cmp = _run_attr_cmp(x, y, attr)
        if attr_cmp != 0:
            return attr_cmp
    return 0


def _run_attr_cmp(x, y, attr):
    if attr.startswith("-"):
        attr = attr[1:]
        rev = -1
    else:
        rev = 1
    x_val = _run_attr(x, attr)
    if x_val is None:
        return -rev
    y_val = _run_attr(y, attr)
    if y_val is None:
        return rev
    return rev * ((x_val > y_val) - (x_val < y_val))


def _run_attr(run, name):
    if name in runlib.Run.__properties__:
        return getattr(run, name)
    return run.get(name)


def delete_runs(runs, permanent=False):
    for run in runs:
        src = run.dir
        if permanent:
            _delete_run(src)
        else:
            dest = os.path.join(runs_dir(deleted=True), run.id)
            _move(src, dest)


def purge_runs(runs):
    for run in runs:
        _delete_run(run.dir)


def _delete_run(src):
    assert src and src != os.path.sep, src
    assert src.startswith(runs_dir()) or src.startswith(runs_dir(deleted=True)), src
    log.debug("deleting %s", src)
    shutil.rmtree(src)


def _move(src, dest):
    util.ensure_dir(os.path.dirname(dest))
    log.debug("moving %s to %s", src, dest)
    if os.path.exists(dest):
        _move_to_backup(dest)
    shutil.move(src, dest)


def _move_to_backup(path):
    dir = os.path.dirname(path)
    prefix = f"{os.path.basename(path)}_"
    backup = tempfile.NamedTemporaryFile(prefix=prefix, dir=dir, delete=True)
    log.warning("%s exists, moving to %s", path, backup.name)
    backup.close()
    shutil.move(path, backup.name)


def restore_runs(runs):
    for run in runs:
        src = os.path.join(run.dir)
        dest = os.path.join(runs_dir(), run.id)
        if util.compare_paths(src, dest):
            log.warning("%s is already restored, skipping", run.id)
            continue
        _move(src, dest)


def find_runs(run_id_prefix, root=None):
    root = root or runs_dir()
    return (
        (name, path)
        for name, path in _iter_dirs(root)
        if name.startswith(run_id_prefix)
    )


def get_run(run_id, root=None):
    root = root or runs_dir()
    path = os.path.join(root, run_id)
    if os.path.exists(path):
        return runlib.Run(run_id, path)
    raise LookupError(run_id)
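
# A minimal usage sketch for the module above, assuming it is importable as
# guild.var and that a Guild home with runs exists; "started" and "status"
# are assumed run attributes here.
from guild import var

# List runs sorted by start time, newest first.
for run in var.runs(sort=["-started"]):
    print(run.id, run.dir)

# Compose a filter with the run_filter factory defined above.
completed = var.run_filter("attr", "status", "completed")
print(len(var.runs(filter=completed)))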
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkvpc.endpoint import endpoint_data


class AllocateEipAddressRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Vpc', '2016-04-28', 'AllocateEipAddress', 'vpc')
        self.set_method('POST')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_IpAddress(self):  # String
        return self.get_query_params().get('IpAddress')

    def set_IpAddress(self, IpAddress):  # String
        self.add_query_param('IpAddress', IpAddress)

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_PublicIpAddressPoolId(self):  # String
        return self.get_query_params().get('PublicIpAddressPoolId')

    def set_PublicIpAddressPoolId(self, PublicIpAddressPoolId):  # String
        self.add_query_param('PublicIpAddressPoolId', PublicIpAddressPoolId)

    def get_ClientToken(self):  # String
        return self.get_query_params().get('ClientToken')

    def METHOD_NAME(self, ClientToken):  # String
        self.add_query_param('ClientToken', ClientToken)

    def get_ISP(self):  # String
        return self.get_query_params().get('ISP')

    def set_ISP(self, ISP):  # String
        self.add_query_param('ISP', ISP)

    def get_Description(self):  # String
        return self.get_query_params().get('Description')

    def set_Description(self, Description):  # String
        self.add_query_param('Description', Description)

    def get_ResourceGroupId(self):  # String
        return self.get_query_params().get('ResourceGroupId')

    def set_ResourceGroupId(self, ResourceGroupId):  # String
        self.add_query_param('ResourceGroupId', ResourceGroupId)

    def get_Zone(self):  # String
        return self.get_query_params().get('Zone')

    def set_Zone(self, Zone):  # String
        self.add_query_param('Zone', Zone)

    def get_Netmode(self):  # String
        return self.get_query_params().get('Netmode')

    def set_Netmode(self, Netmode):  # String
        self.add_query_param('Netmode', Netmode)

    def get_InstanceChargeType(self):  # String
        return self.get_query_params().get('InstanceChargeType')

    def set_InstanceChargeType(self, InstanceChargeType):  # String
        self.add_query_param('InstanceChargeType', InstanceChargeType)

    def get_Period(self):  # Integer
        return self.get_query_params().get('Period')

    def set_Period(self, Period):  # Integer
        self.add_query_param('Period', Period)

    def get_AutoPay(self):  # Boolean
        return self.get_query_params().get('AutoPay')

    def set_AutoPay(self, AutoPay):  # Boolean
        self.add_query_param('AutoPay', AutoPay)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_Bandwidth(self):  # String
        return self.get_query_params().get('Bandwidth')

    def set_Bandwidth(self, Bandwidth):  # String
        self.add_query_param('Bandwidth', Bandwidth)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_ActivityId(self):  # Long
        return self.get_query_params().get('ActivityId')

    def set_ActivityId(self, ActivityId):  # Long
        self.add_query_param('ActivityId', ActivityId)

    def get_InstanceId(self):  # String
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self, InstanceId):  # String
        self.add_query_param('InstanceId', InstanceId)

    def get_InternetChargeType(self):  # String
        return self.get_query_params().get('InternetChargeType')

    def set_InternetChargeType(self, InternetChargeType):  # String
        self.add_query_param('InternetChargeType', InternetChargeType)

    def get_Name(self):  # String
        return self.get_query_params().get('Name')

    def set_Name(self, Name):  # String
        self.add_query_param('Name', Name)

    def get_SecurityProtectionTypess(self):  # RepeatList
        return self.get_query_params().get('SecurityProtectionTypes')

    def set_SecurityProtectionTypess(self, SecurityProtectionTypes):  # RepeatList
        for depth1 in range(len(SecurityProtectionTypes)):
            self.add_query_param(
                'SecurityProtectionTypes.' + str(depth1 + 1),
                SecurityProtectionTypes[depth1])

    def get_PricingCycle(self):  # String
        return self.get_query_params().get('PricingCycle')

    def set_PricingCycle(self, PricingCycle):  # String
        self.add_query_param('PricingCycle', PricingCycle)
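
# A minimal sketch of sending the request above through the core SDK client;
# the credentials and region are placeholders, while AcsClient and
# do_action_with_exception come from aliyunsdkcore.
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
request = AllocateEipAddressRequest()
request.set_Bandwidth('5')
request.set_InternetChargeType('PayByTraffic')
print(client.do_action_with_exception(request))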
##########################################################################
#
# Copyright (c) 2007-2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#
#     * Neither the name of Image Engine Design nor the names of any
#       other contributors to this software may be used to endorse or
#       promote products derived from this software without specific prior
#       written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################

import os, sys, traceback
import inspect, string
import warnings

import IECore

## Set the environment variable and the current LevelFilteredMessageHandler.
# Parameters:
# levelName: a string with the name of the log level as defined in MessageHandler.Level.
#
# This function sets the $IECORE_LOG_LEVEL environment variable, so child processes will inherit the log level.
# If the current message handler is also a LevelFilteredMessageHandler, this function pops
# it from the stack and registers the new one.
## \ingroup python
def setLogLevelByName( levelName ):

    IECore.setLogLevel( IECore.MessageHandler.stringAsLevel( levelName ) )

## Set the environment variable and the current LevelFilteredMessageHandler.
# Parameters:
# level: MessageHandler.Level value.
#
# This function sets the $IECORE_LOG_LEVEL environment variable, so child processes will inherit the log level.
# If the current message handler is also a LevelFilteredMessageHandler, this function pops
# it from the stack and registers the new one.
## \ingroup python
def setLogLevel( level ):

    assert( isinstance( level, IECore.MessageHandler.Level ) and level != IECore.MessageHandler.Level.Invalid )

    os.environ["IECORE_LOG_LEVEL"] = IECore.MessageHandler.levelAsString( level )

    current = IECore.MessageHandler.currentHandler()
    if not isinstance( current, IECore.LevelFilteredMessageHandler ) :
        IECore.msg( IECore.Msg.Level.Warning, "IECore.setLogLevel", "Failed to set log level - current handler is not a LevelFilteredMessageHandler" )
        return

    current.setLevel( level )

    IECore.debug("setLogLevel(", level, ")")

def __getCallStr(frame):
    return frame.f_globals.get("__name__", frame.f_globals.get("__file__", "N/A"))

def __getCallContext(frame = None, withLineNumber = False):
    if frame is None:
        f = inspect.currentframe().f_back.f_back
    else:
        f = frame
    callStr = __getCallStr(f)
    if withLineNumber:
        callStr += " #" + str(f.f_lineno)
    return callStr

## Helper function for tracking down difficult errors.
# It prints the call stack, giving the module name and the line number.
## \ingroup python
def showCallStack():

    f = inspect.currentframe().f_back.f_back
    index = 0
    callstack = "Callstack:\n"
    while not f is None:
        callstack += "> " + str(index) + ": " + __getCallStr(f) + " #" + str(f.f_lineno) + "\n"
        f = f.f_back
        index += 1
    IECore.Msg.output(IECore.Msg.Level.Debug, __getCallContext( withLineNumber = True ), callstack )

## Use this function to get information about the context where the exception happened.
# Returns a tuple of strings (location, stack trace) for the captured exception.
## \ingroup python
def exceptionInfo():

    (exceptionType, exception, trace) = sys.exc_info()
    etb = traceback.extract_tb(trace)
    exceptionType = str(exceptionType.__name__) + ": " + str(exception)
    exceptInfo = ""
    for (module, line, function, location) in etb:
        exceptInfo += "  File " + str(module) + ", line " + str(line) + ", in " + str(function) + "\n>    " + str(location) + "\n"
    return ( __getCallContext( withLineNumber = True ), "Exception traceback:\n" + exceptInfo + exceptionType)

## Sends debug messages to the current message handler and appends a full description of the caught exception.
# Parameters:
# Any string or object. They are converted to string and separated by space.
## \ingroup python
def debugException(*args):

    # same as debug
    stdStr = " ".join(map(str, args))

    (exceptionType, exception, trace) = sys.exc_info()
    etb = traceback.extract_tb(trace)
    exceptionType = "> " + str(exceptionType.__name__) + ": " + str(exception)
    exceptInfo = ""
    for (module, line, function, location) in etb:
        exceptInfo += ">  File " + str(module) + ", line " + str(line) + ", in " + str(function) + "\n>    " + str(location) + "\n"
    IECore.Msg.output(IECore.Msg.Level.Debug, __getCallContext( withLineNumber = True ), "[EXCEPTION CAPTURED] " + stdStr + "\n> Exception traceback:\n" + exceptInfo + exceptionType)

## Sends debug messages to the current message handler.
# Every message includes information about the module and line number from where this function was called.
# Parameters:
# Any string or object. They are converted to string and separated by space.
## \ingroup python
def debug(*args):

    stdStr = " ".join(map(str, args))
    IECore.Msg.output(IECore.Msg.Level.Debug, __getCallContext( withLineNumber = True ), stdStr )

## Sends warning messages to the current message handler.
# Parameters:
# Any string or object. They are converted to string and separated by space.
## \ingroup python
def warning(*args):

    stdStr = " ".join(map(str, args))
    IECore.Msg.output(IECore.Msg.Level.Warning, __getCallContext(), stdStr )

## Sends info messages to the current message handler.
# Parameters:
# Any string or object. They are converted to string and separated by space.
## \ingroup python
def METHOD_NAME(*args):

    stdStr = " ".join(map(str, args))
    IECore.Msg.output(IECore.Msg.Level.Info, __getCallContext(), stdStr )

## Sends error messages to the current message handler.
# Parameters:
# Any string or object. They are converted to string and separated by space.
## \ingroup python
def error(*args):

    stdStr = " ".join(map(str, args))
    IECore.Msg.output(IECore.Msg.Level.Error, __getCallContext(), stdStr )

__all__ = [ "setLogLevelByName", "setLogLevel", "showCallStack",
    "exceptionInfo", "debugException", "debug", "warning", "info", "error",
]
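
# A short sketch of the helpers above, assuming this module's functions are
# in scope (they are re-exported via __all__); level names follow
# IECore.MessageHandler.Level, e.g. "Debug", "Info", "Warning", "Error".
setLogLevelByName("Debug")
debug("debugging is now visible")
warning("warnings carry the calling module as context")
try:
    raise RuntimeError("boom")
except RuntimeError:
    debugException("caught while demonstrating exception logging")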
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkrds.endpoint import endpoint_data
import json


class ModifyDBInstanceSpecRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Rds', '2014-08-15', 'ModifyDBInstanceSpec')
        self.set_method('POST')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_DBInstanceStorage(self):  # Integer
        return self.get_query_params().get('DBInstanceStorage')

    def set_DBInstanceStorage(self, DBInstanceStorage):  # Integer
        self.add_query_param('DBInstanceStorage', DBInstanceStorage)

    def get_EngineVersion(self):  # String
        return self.get_query_params().get('EngineVersion')

    def set_EngineVersion(self, EngineVersion):  # String
        self.add_query_param('EngineVersion', EngineVersion)

    def get_AutoUseCoupon(self):  # Boolean
        return self.get_query_params().get('AutoUseCoupon')

    def set_AutoUseCoupon(self, AutoUseCoupon):  # Boolean
        self.add_query_param('AutoUseCoupon', AutoUseCoupon)

    def get_ResourceGroupId(self):  # String
        return self.get_query_params().get('ResourceGroupId')

    def set_ResourceGroupId(self, ResourceGroupId):  # String
        self.add_query_param('ResourceGroupId', ResourceGroupId)

    def get_ServerlessConfiguration(self):  # Struct
        return self.get_query_params().get('ServerlessConfiguration')

    def set_ServerlessConfiguration(self, ServerlessConfiguration):  # Struct
        self.add_query_param("ServerlessConfiguration", json.dumps(ServerlessConfiguration))

    def get_EffectiveTime(self):  # String
        return self.get_query_params().get('EffectiveTime')

    def set_EffectiveTime(self, EffectiveTime):  # String
        self.add_query_param('EffectiveTime', EffectiveTime)

    def get_DBInstanceId(self):  # String
        return self.get_query_params().get('DBInstanceId')

    def set_DBInstanceId(self, DBInstanceId):  # String
        self.add_query_param('DBInstanceId', DBInstanceId)

    def get_SwitchTime(self):  # String
        return self.get_query_params().get('SwitchTime')

    def METHOD_NAME(self, SwitchTime):  # String
        self.add_query_param('SwitchTime', SwitchTime)

    def get_DBInstanceStorageType(self):  # String
        return self.get_query_params().get('DBInstanceStorageType')

    def set_DBInstanceStorageType(self, DBInstanceStorageType):  # String
        self.add_query_param('DBInstanceStorageType', DBInstanceStorageType)

    def get_SourceBiz(self):  # String
        return self.get_query_params().get('SourceBiz')

    def set_SourceBiz(self, SourceBiz):  # String
        self.add_query_param('SourceBiz', SourceBiz)

    def get_DedicatedHostGroupId(self):  # String
        return self.get_query_params().get('DedicatedHostGroupId')

    def set_DedicatedHostGroupId(self, DedicatedHostGroupId):  # String
        self.add_query_param('DedicatedHostGroupId', DedicatedHostGroupId)

    def get_Direction(self):  # String
        return self.get_query_params().get('Direction')

    def set_Direction(self, Direction):  # String
        self.add_query_param('Direction', Direction)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_UsedTime(self):  # Long
        return self.get_query_params().get('UsedTime')

    def set_UsedTime(self, UsedTime):  # Long
        self.add_query_param('UsedTime', UsedTime)

    def get_BurstingEnabled(self):  # Boolean
        return self.get_query_params().get('BurstingEnabled')

    def set_BurstingEnabled(self, BurstingEnabled):  # Boolean
        self.add_query_param('BurstingEnabled', BurstingEnabled)

    def get_TargetMinorVersion(self):  # String
        return self.get_query_params().get('TargetMinorVersion')

    def set_TargetMinorVersion(self, TargetMinorVersion):  # String
        self.add_query_param('TargetMinorVersion', TargetMinorVersion)

    def get_DBInstanceClass(self):  # String
        return self.get_query_params().get('DBInstanceClass')

    def set_DBInstanceClass(self, DBInstanceClass):  # String
        self.add_query_param('DBInstanceClass', DBInstanceClass)

    def get_ZoneId(self):  # String
        return self.get_query_params().get('ZoneId')

    def set_ZoneId(self, ZoneId):  # String
        self.add_query_param('ZoneId', ZoneId)

    def get_Category(self):  # String
        return self.get_query_params().get('Category')

    def set_Category(self, Category):  # String
        self.add_query_param('Category', Category)

    def get_PayType(self):  # String
        return self.get_query_params().get('PayType')

    def set_PayType(self, PayType):  # String
        self.add_query_param('PayType', PayType)
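
# A brief sketch of the Struct-typed parameter above: the serverless
# configuration is passed as a plain dict and serialized with json.dumps by
# set_ServerlessConfiguration. The instance ID and field names here are
# illustrative only.
request = ModifyDBInstanceSpecRequest()
request.set_DBInstanceId('rm-example')
request.set_ServerlessConfiguration({"MinCapacity": 0.5, "MaxCapacity": 8})
print(request.get_query_params()['ServerlessConfiguration'])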
# Copyright 2021 Camptocamp (http://www.camptocamp.com).
# @author Simone Orsi <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from odoo import exceptions

from .common import TestUserManagementCommmon


class TestUserManagement(TestUserManagementCommmon):
    def test_search_as_admin(self):
        service = self._get_service(self.company)
        expected = self.user2_binding + self.user3_binding + self.user_binding
        self._test_search(service, expected)

    def test_search_as_simpleuser(self):
        for binding in (
            self.user_binding,
            self.user2_binding,
            self.user3_binding,
        ):
            service = self._get_service(binding.record_id)
            with self.assertRaisesRegex(exceptions.AccessError, "User not allowed"):
                service.dispatch("search")

    def test_create_as_admin(self):
        service = self._get_service(self.company)
        params = {
            "name": "John Doe",
            "email": "[email protected]",
        }
        self._test_create(service, params)

    def test_create_as_simpleuser(self):
        for binding in (
            self.user_binding,
            self.user2_binding,
            self.user3_binding,
        ):
            service = self._get_service(binding.record_id)
            params = {
                "name": "Created by " + binding.name,
                "email": "created_by_" + binding.email,
            }
            with self.assertRaisesRegex(exceptions.AccessError, "User not allowed"):
                service.dispatch("create", params=params)

    def test_update_as_admin(self):
        service = self._get_service(self.company)
        params = {
            "name": self.user_binding.name + " UPDATED",
        }
        service.dispatch("update", self.user_binding.id, params=params)["data"]
        partner = self.company.child_ids.filtered_domain(
            [("email", "=", self.user_binding.email)]
        )
        self.assertEqual(partner.name, "Simple user UPDATED")

    def test_update_as_simpleuser(self):
        for binding in (
            self.user_binding,
            self.user2_binding,
            self.user3_binding,
        ):
            service = self._get_service(binding.record_id)
            params = {
                "name": self.user_binding.name + " UPDATED",
            }
            with self.assertRaisesRegex(exceptions.AccessError, "User not allowed"):
                service.dispatch("update", self.user_binding.id, params)

    def test_delete_as_admin(self):
        service = self._get_service(self.company)
        child_partner2 = self.user2_binding.record_id
        # Delete its user
        service.dispatch("delete", self.user2_binding.id)
        # The binding is gone
        self.assertFalse(self.user2_binding.exists())
        # BUT since its partner had another user it won't be touched
        self.assertTrue(child_partner2.active)
        # Let's delete the other user
        child_partner3 = self.user3_binding.record_id
        service.dispatch("delete", self.user3_binding.id)
        self.assertFalse(self.user3_binding.exists())
        # Now its partner is archived
        self.assertFalse(child_partner3.active)

    def test_delete_as_simpleuser(self):
        for binding in (
            self.user_binding,
            self.user2_binding,
            self.user3_binding,
        ):
            service = self._get_service(binding.record_id)
            with self.assertRaisesRegex(exceptions.AccessError, "User not allowed"):
                service.dispatch("delete", self.user2_binding.id)


class TestUserManagementDelegateManage(TestUserManagementCommmon):
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.user_binding.can_manage_users = True
        cls.user2_binding.can_manage_users = True

    # Test delegated permission: manage users

    def test_search_as_simpleuser_delegate_manage_users(self):
        # This user has no sub users, no result
        binding = self.user_binding
        service = self._get_service(binding.record_id)
        expected = self.user2_binding.browse()
        self._test_search(service, expected)
        # This user has a sub user, should find it
        binding = self.user2_binding
        service = self._get_service(binding.record_id)
        expected = self.user3_binding
        self._test_search(service, expected)

    def METHOD_NAME(self):
        binding = self.user_binding
        self.assertFalse(binding.child_ids)
        service = self._get_service(binding.record_id)
        params = {
            "name": "Created by " + binding.name,
            "email": "created_by_" + binding.email,
        }
        new_user = self._test_create(service, params)
        service.dispatch("delete", new_user.id)
        self.assertFalse(new_user.exists())

    def test_delete_as_simpleuser_delegate_manage_users(self):
        binding = self.user_binding
        service = self._get_service(binding.record_id)
        child_partner2 = self.user2_binding.record_id
        # Delete a user that does not belong to them
        with self.assertRaises(exceptions.MissingError):
            service.dispatch("delete", self.user2_binding.id)
        # Try to delete its sub user instead
        binding = self.user2_binding
        service = self._get_service(binding.record_id)
        child_partner3 = self.user3_binding.record_id
        service.dispatch("delete", self.user3_binding.id)
        self.assertFalse(self.user3_binding.exists())
        # BUT since its partner had another user it won't be touched
        self.assertTrue(child_partner2.active)
        self.assertFalse(child_partner3.active)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkdomain.endpoint import endpoint_data


class SaveSingleTaskForCreatingOrderActivateRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Domain', '2018-01-29', 'SaveSingleTaskForCreatingOrderActivate')
        self.set_method('POST')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_Country(self):  # String
        return self.get_query_params().get('Country')

    def set_Country(self, Country):  # String
        self.add_query_param('Country', Country)

    def get_SubscriptionDuration(self):  # Integer
        return self.get_query_params().get('SubscriptionDuration')

    def set_SubscriptionDuration(self, SubscriptionDuration):  # Integer
        self.add_query_param('SubscriptionDuration', SubscriptionDuration)

    def get_PermitPremiumActivation(self):  # Boolean
        return self.get_query_params().get('PermitPremiumActivation')

    def set_PermitPremiumActivation(self, PermitPremiumActivation):  # Boolean
        self.add_query_param('PermitPremiumActivation', PermitPremiumActivation)

    def get_City(self):  # String
        return self.get_query_params().get('City')

    def set_City(self, City):  # String
        self.add_query_param('City', City)

    def METHOD_NAME(self):  # String
        return self.get_query_params().get('Dns2')

    def set_Dns2(self, Dns2):  # String
        self.add_query_param('Dns2', Dns2)

    def get_Dns1(self):  # String
        return self.get_query_params().get('Dns1')

    def set_Dns1(self, Dns1):  # String
        self.add_query_param('Dns1', Dns1)

    def get_RegistrantProfileId(self):  # Long
        return self.get_query_params().get('RegistrantProfileId')

    def set_RegistrantProfileId(self, RegistrantProfileId):  # Long
        self.add_query_param('RegistrantProfileId', RegistrantProfileId)

    def get_CouponNo(self):  # String
        return self.get_query_params().get('CouponNo')

    def set_CouponNo(self, CouponNo):  # String
        self.add_query_param('CouponNo', CouponNo)

    def get_AliyunDns(self):  # Boolean
        return self.get_query_params().get('AliyunDns')

    def set_AliyunDns(self, AliyunDns):  # Boolean
        self.add_query_param('AliyunDns', AliyunDns)

    def get_ZhCity(self):  # String
        return self.get_query_params().get('ZhCity')

    def set_ZhCity(self, ZhCity):  # String
        self.add_query_param('ZhCity', ZhCity)

    def get_TelExt(self):  # String
        return self.get_query_params().get('TelExt')

    def set_TelExt(self, TelExt):  # String
        self.add_query_param('TelExt', TelExt)

    def get_ZhRegistrantName(self):  # String
        return self.get_query_params().get('ZhRegistrantName')

    def set_ZhRegistrantName(self, ZhRegistrantName):  # String
        self.add_query_param('ZhRegistrantName', ZhRegistrantName)

    def get_Province(self):  # String
        return self.get_query_params().get('Province')

    def set_Province(self, Province):  # String
        self.add_query_param('Province', Province)

    def get_PostalCode(self):  # String
        return self.get_query_params().get('PostalCode')

    def set_PostalCode(self, PostalCode):  # String
        self.add_query_param('PostalCode', PostalCode)

    def get_Lang(self):  # String
        return self.get_query_params().get('Lang')

    def set_Lang(self, Lang):  # String
        self.add_query_param('Lang', Lang)

    def get_Email(self):  # String
        return self.get_query_params().get('Email')

    def set_Email(self, Email):  # String
        self.add_query_param('Email', Email)

    def get_ZhRegistrantOrganization(self):  # String
        return self.get_query_params().get('ZhRegistrantOrganization')

    def set_ZhRegistrantOrganization(self, ZhRegistrantOrganization):  # String
        self.add_query_param('ZhRegistrantOrganization', ZhRegistrantOrganization)

    def get_Address(self):  # String
        return self.get_query_params().get('Address')

    def set_Address(self, Address):  # String
        self.add_query_param('Address', Address)

    def get_TelArea(self):  # String
        return self.get_query_params().get('TelArea')

    def set_TelArea(self, TelArea):  # String
        self.add_query_param('TelArea', TelArea)

    def get_DomainName(self):  # String
        return self.get_query_params().get('DomainName')

    def set_DomainName(self, DomainName):  # String
        self.add_query_param('DomainName', DomainName)

    def get_ZhAddress(self):  # String
        return self.get_query_params().get('ZhAddress')

    def set_ZhAddress(self, ZhAddress):  # String
        self.add_query_param('ZhAddress', ZhAddress)

    def get_RegistrantType(self):  # String
        return self.get_query_params().get('RegistrantType')

    def set_RegistrantType(self, RegistrantType):  # String
        self.add_query_param('RegistrantType', RegistrantType)

    def get_Telephone(self):  # String
        return self.get_query_params().get('Telephone')

    def set_Telephone(self, Telephone):  # String
        self.add_query_param('Telephone', Telephone)

    def get_TrademarkDomainActivation(self):  # Boolean
        return self.get_query_params().get('TrademarkDomainActivation')

    def set_TrademarkDomainActivation(self, TrademarkDomainActivation):  # Boolean
        self.add_query_param('TrademarkDomainActivation', TrademarkDomainActivation)

    def get_UseCoupon(self):  # Boolean
        return self.get_query_params().get('UseCoupon')

    def set_UseCoupon(self, UseCoupon):  # Boolean
        self.add_query_param('UseCoupon', UseCoupon)

    def get_ZhProvince(self):  # String
        return self.get_query_params().get('ZhProvince')

    def set_ZhProvince(self, ZhProvince):  # String
        self.add_query_param('ZhProvince', ZhProvince)

    def get_RegistrantOrganization(self):  # String
        return self.get_query_params().get('RegistrantOrganization')

    def set_RegistrantOrganization(self, RegistrantOrganization):  # String
        self.add_query_param('RegistrantOrganization', RegistrantOrganization)

    def get_PromotionNo(self):  # String
        return self.get_query_params().get('PromotionNo')

    def set_PromotionNo(self, PromotionNo):  # String
        self.add_query_param('PromotionNo', PromotionNo)

    def get_EnableDomainProxy(self):  # Boolean
        return self.get_query_params().get('EnableDomainProxy')

    def set_EnableDomainProxy(self, EnableDomainProxy):  # Boolean
        self.add_query_param('EnableDomainProxy', EnableDomainProxy)

    def get_UserClientIp(self):  # String
        return self.get_query_params().get('UserClientIp')

    def set_UserClientIp(self, UserClientIp):  # String
        self.add_query_param('UserClientIp', UserClientIp)

    def get_RegistrantName(self):  # String
        return self.get_query_params().get('RegistrantName')

    def set_RegistrantName(self, RegistrantName):  # String
        self.add_query_param('RegistrantName', RegistrantName)

    def get_UsePromotion(self):  # Boolean
        return self.get_query_params().get('UsePromotion')

    def set_UsePromotion(self, UsePromotion):  # Boolean
        self.add_query_param('UsePromotion', UsePromotion)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkiot.endpoint import endpoint_data


class CreateOTAStaticUpgradeJobRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Iot', '2018-01-20', 'CreateOTAStaticUpgradeJob')
        self.set_method('POST')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_MultiModuleMode(self):
        return self.get_query_params().get('MultiModuleMode')

    def set_MultiModuleMode(self, MultiModuleMode):
        self.add_query_param('MultiModuleMode', MultiModuleMode)

    def get_RetryCount(self):
        return self.get_query_params().get('RetryCount')

    def set_RetryCount(self, RetryCount):
        self.add_query_param('RetryCount', RetryCount)

    def METHOD_NAME(self):
        return self.get_query_params().get('TimeoutInMinutes')

    def set_TimeoutInMinutes(self, TimeoutInMinutes):
        self.add_query_param('TimeoutInMinutes', TimeoutInMinutes)

    def get_NeedConfirm(self):
        return self.get_query_params().get('NeedConfirm')

    def set_NeedConfirm(self, NeedConfirm):
        self.add_query_param('NeedConfirm', NeedConfirm)

    def get_GroupType(self):
        return self.get_query_params().get('GroupType')

    def set_GroupType(self, GroupType):
        self.add_query_param('GroupType', GroupType)

    def get_NeedPush(self):
        return self.get_query_params().get('NeedPush')

    def set_NeedPush(self, NeedPush):
        self.add_query_param('NeedPush', NeedPush)

    def get_IotInstanceId(self):
        return self.get_query_params().get('IotInstanceId')

    def set_IotInstanceId(self, IotInstanceId):
        self.add_query_param('IotInstanceId', IotInstanceId)

    def get_DownloadProtocol(self):
        return self.get_query_params().get('DownloadProtocol')

    def set_DownloadProtocol(self, DownloadProtocol):
        self.add_query_param('DownloadProtocol', DownloadProtocol)

    def get_TargetSelection(self):
        return self.get_query_params().get('TargetSelection')

    def set_TargetSelection(self, TargetSelection):
        self.add_query_param('TargetSelection', TargetSelection)

    def get_ScheduleFinishTime(self):
        return self.get_query_params().get('ScheduleFinishTime')

    def set_ScheduleFinishTime(self, ScheduleFinishTime):
        self.add_query_param('ScheduleFinishTime', ScheduleFinishTime)

    def get_Tags(self):
        return self.get_query_params().get('Tag')

    def set_Tags(self, Tags):
        for depth1 in range(len(Tags)):
            if Tags[depth1].get('Value') is not None:
                self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
            if Tags[depth1].get('Key') is not None:
                self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))

    def get_GrayPercent(self):
        return self.get_query_params().get('GrayPercent')

    def set_GrayPercent(self, GrayPercent):
        self.add_query_param('GrayPercent', GrayPercent)

    def get_DnListFileUrl(self):
        return self.get_query_params().get('DnListFileUrl')

    def set_DnListFileUrl(self, DnListFileUrl):
        self.add_query_param('DnListFileUrl', DnListFileUrl)

    def get_GroupId(self):
        return self.get_query_params().get('GroupId')

    def set_GroupId(self, GroupId):
        self.add_query_param('GroupId', GroupId)

    def get_FirmwareId(self):
        return self.get_query_params().get('FirmwareId')

    def set_FirmwareId(self, FirmwareId):
        self.add_query_param('FirmwareId', FirmwareId)

    def get_ProductKey(self):
        return self.get_query_params().get('ProductKey')

    def set_ProductKey(self, ProductKey):
        self.add_query_param('ProductKey', ProductKey)

    def get_RetryInterval(self):
        return self.get_query_params().get('RetryInterval')

    def set_RetryInterval(self, RetryInterval):
        self.add_query_param('RetryInterval', RetryInterval)

    def get_SrcVersions(self):
        return self.get_query_params().get('SrcVersion')

    def set_SrcVersions(self, SrcVersions):
        for depth1 in range(len(SrcVersions)):
            if SrcVersions[depth1] is not None:
                self.add_query_param('SrcVersion.' + str(depth1 + 1), SrcVersions[depth1])

    def get_ScheduleTime(self):
        return self.get_query_params().get('ScheduleTime')

    def set_ScheduleTime(self, ScheduleTime):
        self.add_query_param('ScheduleTime', ScheduleTime)

    def get_OverwriteMode(self):
        return self.get_query_params().get('OverwriteMode')

    def set_OverwriteMode(self, OverwriteMode):
        self.add_query_param('OverwriteMode', OverwriteMode)

    def get_MaximumPerMinute(self):
        return self.get_query_params().get('MaximumPerMinute')

    def set_MaximumPerMinute(self, MaximumPerMinute):
        self.add_query_param('MaximumPerMinute', MaximumPerMinute)

    def get_TargetDeviceNames(self):
        return self.get_query_params().get('TargetDeviceName')

    def set_TargetDeviceNames(self, TargetDeviceNames):
        for depth1 in range(len(TargetDeviceNames)):
            if TargetDeviceNames[depth1] is not None:
                self.add_query_param('TargetDeviceName.' + str(depth1 + 1), TargetDeviceNames[depth1])
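
# A short sketch of the RepeatList parameters above: set_Tags flattens a list
# of dicts into Tag.N.Key/Tag.N.Value query params and set_SrcVersions into
# SrcVersion.N. The IDs and values are illustrative only.
request = CreateOTAStaticUpgradeJobRequest()
request.set_FirmwareId('fw-example')
request.set_ProductKey('pk-example')
request.set_SrcVersions(['1.0.0', '1.0.1'])
request.set_Tags([{'Key': 'channel', 'Value': 'beta'}])
print(request.get_query_params())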
# Copyright (c) 2023 The Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional
import urllib.parse


class AbstractClient(ABC):
    def verify_status_code(self, status_code: int) -> None:
        """
        Verifies that the status code is 200.

        :param status_code: The status code to verify.
        """
        if status_code == 200:
            return
        if status_code == 429:
            raise Exception("Panic: Too many requests")
        if status_code == 401:
            raise Exception("Panic: Unauthorized")
        if status_code == 404:
            raise Exception("Panic: Not found")
        if status_code == 400:
            raise Exception("Panic: Bad request")
        if status_code == 500:
            raise Exception("Panic: Internal server error")
        raise Exception(f"Panic: Unknown status code {status_code}")

    def _url_validator(self, url: str) -> bool:
        """
        Validates the provided URL.

        :param url: The URL to be validated.
        :return: True if the URL is valid, False otherwise.
        """
        try:
            result = urllib.parse.urlparse(url)
            return all([result.scheme, result.netloc, result.path])
        except Exception:
            return False

    @abstractmethod
    def METHOD_NAME(
        self,
        resource_id: Optional[str] = None,
        resource_version: Optional[str] = None,
        gem5_version: Optional[str] = None,
    ) -> List[Dict[str, Any]]:
        """
        :param resource_id: The ID of the Resource. Optional, if not set, all
            resources will be returned.
        :param resource_version: The version of the Resource. Optional, if
            not set, all resource versions will be returned. Note: If
            `resource_id` is not set, this parameter will be ignored.
        :param gem5_version: The version of gem5. Optional, if not set, all
            versions will be returned.
        :return: A list of all the Resources with the given ID.
        """
        raise NotImplementedError

    def filter_incompatible_resources(
        self,
        resources_to_filter: List[Dict[str, Any]],
        gem5_version: Optional[str] = None,
    ) -> List[Dict[str, Any]]:
        """Returns a filtered list of resources based on gem5 version
        compatibility.

        Note: This function assumes that if the minor component of a
        resource's gem5_version is not specified, the resource is compatible
        with all minor versions of the same major version. Likewise, if no
        hot-fix component is specified, it is assumed that the resource is
        compatible with all hot-fix versions of the same minor version.

        * '20.1' would be compatible with gem5 '20.1.1.0' and '20.1.2.0'.
        * '21.5.2' would be compatible with gem5 '21.5.2.0' and '21.5.2.1'.
        * '22.3.2.4' would only be compatible with gem5 '22.3.2.4'.

        :param resources_to_filter: The list of resources to filter.
        :param gem5_version: The gem5 version in which the filtered resources
            should be compatible. If None, no filtering will be done.
        """
        if not gem5_version:
            return resources_to_filter

        filtered_resources = []
        for resource in resources_to_filter:
            for version in resource["gem5_versions"]:
                if gem5_version.startswith(version):
                    filtered_resources.append(resource)

        return filtered_resources

    def get_resources_by_id(self, resource_id: str) -> List[Dict[str, Any]]:
        """
        :param resource_id: The ID of the Resource.

        :return: A list of all the Resources with the given ID.
        """
        return self.METHOD_NAME(resource_id=resource_id)
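
# A small sketch of filter_incompatible_resources, using a stub subclass so
# the abstract class can be instantiated; resources carry a "gem5_versions"
# list, and prefix matching implements the compatibility rules documented
# above. The resource IDs are illustrative only.
class _StubClient(AbstractClient):
    def METHOD_NAME(self, resource_id=None, resource_version=None, gem5_version=None):
        return []


resources = [
    {"id": "riscv-disk", "gem5_versions": ["22.0"]},
    {"id": "arm-disk", "gem5_versions": ["21.2", "22.1"]},
]
print([r["id"] for r in _StubClient().filter_incompatible_resources(
    resources, gem5_version="22.0.0.1")])  # ['riscv-disk']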
from typing import Callable

import torch
from nnunetv2.utilities.ddp_allgather import AllGatherGrad
from torch import nn


class SoftDiceLoss(nn.Module):
    def __init__(self, apply_nonlin: Callable = None, batch_dice: bool = False,
                 do_bg: bool = True, smooth: float = 1., ddp: bool = True,
                 clip_tp: float = None):
        """
        """
        super(SoftDiceLoss, self).__init__()

        self.do_bg = do_bg
        self.batch_dice = batch_dice
        self.apply_nonlin = apply_nonlin
        self.smooth = smooth
        self.clip_tp = clip_tp
        self.ddp = ddp

    def forward(self, x, y, loss_mask=None):
        shp_x = x.shape

        if self.batch_dice:
            axes = [0] + list(range(2, len(shp_x)))
        else:
            axes = list(range(2, len(shp_x)))

        if self.apply_nonlin is not None:
            x = self.apply_nonlin(x)

        tp, fp, fn, _ = METHOD_NAME(x, y, axes, loss_mask, False)

        if self.ddp and self.batch_dice:
            tp = AllGatherGrad.apply(tp).sum(0)
            fp = AllGatherGrad.apply(fp).sum(0)
            fn = AllGatherGrad.apply(fn).sum(0)

        if self.clip_tp is not None:
            tp = torch.clip(tp, min=self.clip_tp, max=None)

        nominator = 2 * tp
        denominator = 2 * tp + fp + fn

        dc = (nominator + self.smooth) / (torch.clip(denominator + self.smooth, 1e-8))

        if not self.do_bg:
            if self.batch_dice:
                dc = dc[1:]
            else:
                dc = dc[:, 1:]
        dc = dc.mean()

        return -dc


class MemoryEfficientSoftDiceLoss(nn.Module):
    def __init__(self, apply_nonlin: Callable = None, batch_dice: bool = False,
                 do_bg: bool = True, smooth: float = 1., ddp: bool = True):
        """
        saves 1.6 GB on Dataset017 3d_lowres
        """
        super(MemoryEfficientSoftDiceLoss, self).__init__()

        self.do_bg = do_bg
        self.batch_dice = batch_dice
        self.apply_nonlin = apply_nonlin
        self.smooth = smooth
        self.ddp = ddp

    def forward(self, x, y, loss_mask=None):
        shp_x, shp_y = x.shape, y.shape

        if self.apply_nonlin is not None:
            x = self.apply_nonlin(x)

        if not self.do_bg:
            x = x[:, 1:]

        # make everything shape (b, c)
        axes = list(range(2, len(shp_x)))

        with torch.no_grad():
            if len(shp_x) != len(shp_y):
                y = y.view((shp_y[0], 1, *shp_y[1:]))

            if all([i == j for i, j in zip(shp_x, shp_y)]):
                # if this is the case then gt is probably already a one hot encoding
                y_onehot = y
            else:
                gt = y.long()
                y_onehot = torch.zeros(shp_x, device=x.device, dtype=torch.bool)
                y_onehot.scatter_(1, gt, 1)

            if not self.do_bg:
                y_onehot = y_onehot[:, 1:]

            sum_gt = y_onehot.sum(axes) if loss_mask is None else (y_onehot * loss_mask).sum(axes)

        intersect = (x * y_onehot).sum(axes) if loss_mask is None else (x * y_onehot * loss_mask).sum(axes)
        sum_pred = x.sum(axes) if loss_mask is None else (x * loss_mask).sum(axes)

        if self.ddp and self.batch_dice:
            intersect = AllGatherGrad.apply(intersect).sum(0)
            sum_pred = AllGatherGrad.apply(sum_pred).sum(0)
            sum_gt = AllGatherGrad.apply(sum_gt).sum(0)

        if self.batch_dice:
            intersect = intersect.sum(0)
            sum_pred = sum_pred.sum(0)
            sum_gt = sum_gt.sum(0)

        dc = (2 * intersect + self.smooth) / (torch.clip(sum_gt + sum_pred + self.smooth, 1e-8))

        dc = dc.mean()
        return -dc


def METHOD_NAME(net_output, gt, axes=None, mask=None, square=False):
    """
    net_output must be (b, c, x, y(, z)))
    gt must be a label map (shape (b, 1, x, y(, z)) OR shape (b, x, y(, z))) or one hot encoding (b, c, x, y(, z))
    if mask is provided it must have shape (b, 1, x, y(, z)))
    :param net_output:
    :param gt:
    :param axes: can be (, ) = no summation
    :param mask: mask must be 1 for valid pixels and 0 for invalid pixels
    :param square: if True then fp, tp and fn will be squared before summation
    :return:
    """
    if axes is None:
        axes = tuple(range(2, len(net_output.size())))

    shp_x = net_output.shape
    shp_y = gt.shape

    with torch.no_grad():
        if len(shp_x) != len(shp_y):
            gt = gt.view((shp_y[0], 1, *shp_y[1:]))

        if all([i == j for i, j in zip(net_output.shape, gt.shape)]):
            # if this is the case then gt is probably already a one hot encoding
            y_onehot = gt
        else:
            gt = gt.long()
            y_onehot = torch.zeros(shp_x, device=net_output.device)
            y_onehot.scatter_(1, gt, 1)

    tp = net_output * y_onehot
    fp = net_output * (1 - y_onehot)
    fn = (1 - net_output) * y_onehot
    tn = (1 - net_output) * (1 - y_onehot)

    if mask is not None:
        with torch.no_grad():
            mask_here = torch.tile(mask, (1, tp.shape[1], *[1 for i in range(2, len(tp.shape))]))
        tp *= mask_here
        fp *= mask_here
        fn *= mask_here
        tn *= mask_here
        # benchmark whether tiling the mask would be faster (torch.tile). It probably is for large batch sizes
        # OK it barely makes a difference but the implementation above is a tiny bit faster + uses less vram
        # (using nnUNetv2_train 998 3d_fullres 0)
        # tp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tp, dim=1)), dim=1)
        # fp = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fp, dim=1)), dim=1)
        # fn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(fn, dim=1)), dim=1)
        # tn = torch.stack(tuple(x_i * mask[:, 0] for x_i in torch.unbind(tn, dim=1)), dim=1)

    if square:
        tp = tp ** 2
        fp = fp ** 2
        fn = fn ** 2
        tn = tn ** 2

    if len(axes) > 0:
        tp = tp.sum(dim=axes, keepdim=False)
        fp = fp.sum(dim=axes, keepdim=False)
        fn = fn.sum(dim=axes, keepdim=False)
        tn = tn.sum(dim=axes, keepdim=False)

    return tp, fp, fn, tn


if __name__ == '__main__':
    from nnunetv2.utilities.helpers import softmax_helper_dim1
    pred = torch.rand((2, 3, 32, 32, 32))
    ref = torch.randint(0, 3, (2, 32, 32, 32))

    dl_old = SoftDiceLoss(apply_nonlin=softmax_helper_dim1, batch_dice=True, do_bg=False, smooth=0, ddp=False)
    dl_new = MemoryEfficientSoftDiceLoss(apply_nonlin=softmax_helper_dim1, batch_dice=True, do_bg=False, smooth=0, ddp=False)
    res_old = dl_old(pred, ref)
    res_new = dl_new(pred, ref)
    print(res_old, res_new)
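
# A small sketch of the counts helper above (METHOD_NAME here, assumed in
# scope): with a label map of shape (b, 1, x, y) it one-hot encodes the ground
# truth and reduces tp/fp/fn/tn over the given spatial axes, yielding
# per-sample, per-class counts.
probs = torch.softmax(torch.rand((2, 3, 8, 8)), dim=1)
labels = torch.randint(0, 3, (2, 1, 8, 8))
tp, fp, fn, tn = METHOD_NAME(probs, labels, axes=(2, 3))
print(tp.shape)  # torch.Size([2, 3])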
from __future__ import print_function
import IMP
import IMP.em
import IMP.test
import IMP.core
import IMP.atom
import random
import math


class Tests(IMP.test.TestCase):

    """Class to test EM correlation restraint"""

    def load_density_map(self):
        mrw = IMP.em.MRCReaderWriter()
        self.scene = IMP.em.read_map(
            self.get_input_file_name("1z5s_10.mrc"), mrw)
        self.resolution = 10.
        self.voxel_size = 2.
        self.scene.get_header_writable().set_resolution(self.resolution)
        self.scene.update_voxel_size(self.voxel_size)
        self.scene.set_origin(34.0, 8.0, -92.0)

    def load_protein(self, pdb_filename):
        with self.open_input_file(pdb_filename) as fh:
            self.mp = IMP.atom.read_pdb(
                fh, self.imp_model, IMP.atom.CAlphaPDBSelector())
        IMP.atom.add_radii(self.mp)
        self.radius_key = IMP.core.XYZR.get_radius_key()
        self.weight_key = IMP.atom.Mass.get_mass_key()
        self.particles = IMP.core.get_leaves(self.mp)

    def setUp(self):
        """Build test model and optimizer"""
        IMP.test.TestCase.setUp(self)
        IMP.set_log_level(IMP.SILENT)
        IMP.set_check_level(IMP.NONE)
        self.imp_model = IMP.Model()
        self.load_density_map()
        self.load_protein("1z5s_A.pdb")

    def METHOD_NAME(self):
        "Check that CC score does not change after grid and protein rotation"
        mrw = IMP.em.MRCReaderWriter()
        # create a rigid body
        _ = IMP.Particle(self.imp_model)
        # sample density map
        sampled_density_map = IMP.em.SampledDensityMap(self.scene.get_header())
        sampled_density_map.set_particles(self.particles)
        sampled_density_map.resample()
        sampled_density_map.calcRMS()
        IMP.em.write_map(
            sampled_density_map, self.get_tmp_file_name("a1.mrc"), mrw)
        IMP.em.write_map(self.scene, self.get_tmp_file_name("a2.mrc"), mrw)
        # calculate CC
        threshold = sampled_density_map.get_header().dmin
        score1 = IMP.em.get_coarse_cc_coefficient(
            self.scene, sampled_density_map, threshold, True)
        print("score1:", score1)
        # determine a random transformation
        # t=IMP.algebra.Transformation3D(IMP.algebra.random_rotation(),
        #                                IMP.algebra.random_vector_in_unit_box())
        # set a small random transformation
        translation = IMP.algebra.get_random_vector_in(
            IMP.algebra.get_unit_bounding_box_3d())
        axis = IMP.algebra.get_random_vector_on(
            IMP.algebra.get_unit_sphere_3d())
        rand_angle = random.uniform(-15. / 180 * math.pi, 15. / 180 * math.pi)
        r = IMP.algebra.get_rotation_about_axis(axis, rand_angle)
        t = IMP.algebra.Transformation3D(r, translation)
        for p in self.particles:
            IMP.core.XYZ(p).set_coordinates(
                t.get_transformed(IMP.core.XYZ(p).get_coordinates()))
        transformed_density = IMP.em.get_transformed(self.scene, t, threshold)
        sampled_density_map1 = IMP.em.SampledDensityMap(
            transformed_density.get_header())
        sampled_density_map1.set_particles(self.particles)
        sampled_density_map1.resample()
        sampled_density_map1.calcRMS()
        transformed_density.calcRMS()
        threshold = sampled_density_map1.get_header().dmin
        IMP.em.write_map(
            sampled_density_map1, self.get_tmp_file_name("b1.mrc"), mrw)
        IMP.em.write_map(
            transformed_density, self.get_tmp_file_name("b2.mrc"), mrw)
        score2 = IMP.em.get_coarse_cc_coefficient(
            transformed_density, sampled_density_map1, threshold, True)
        print("score2:", score2)
        # move the particles back for the next test
        t_inv = t.get_inverse()
        for p in self.particles:
            IMP.core.XYZ(p).set_coordinates(
                t_inv.get_transformed(IMP.core.XYZ(p).get_coordinates()))
        # because rotating the grid changes the density a bit
        self.assertAlmostEqual(score1, score2, delta=0.05)

    def test_cc_with_sampled_grid_rotation(self):
        """CC score does not change after sampled grid and protein rotation"""
        # set a small random transformation
        translation = IMP.algebra.get_random_vector_in(
            IMP.algebra.get_unit_bounding_box_3d())
        axis = IMP.algebra.get_random_vector_on(
            IMP.algebra.get_unit_sphere_3d())
        rand_angle = random.uniform(-15. / 180 * math.pi, 15. / 180 * math.pi)
        r = IMP.algebra.get_rotation_about_axis(axis, rand_angle)
        t = IMP.algebra.Transformation3D(r, translation)
        # create a sampled density map of the particles
        sampled_density_map = IMP.em.SampledDensityMap(self.scene.get_header())
        sampled_density_map.set_particles(self.particles)
        sampled_density_map.resample()
        sampled_density_map.calcRMS()
        # rotate the map
        threshold = sampled_density_map.get_header().dmin
        transformed_sampled_density = IMP.em.get_transformed(
            sampled_density_map, t)
        # transform the particles and resample the original map
        for p in self.particles:
            IMP.core.XYZ(p).set_coordinates(
                t.get_transformed(IMP.core.XYZ(p).get_coordinates()))
        sampled_density_map.resample()
        sampled_density_map.calcRMS()
        # calculate CC after rotating the particles
        threshold = sampled_density_map.get_header().dmin
        score1 = IMP.em.get_coarse_cc_coefficient(
            self.scene, sampled_density_map, threshold, True)
        print("score1:", score1)
        transformed_sampled_density.calcRMS()
        threshold = transformed_sampled_density.get_header().dmin
        score2 = IMP.em.get_coarse_cc_coefficient(
            self.scene, transformed_sampled_density,
            transformed_sampled_density.get_header().dmin, True)
        print("score2:", score2)
        # move the particles back for the next test
        t_inv = t.get_inverse()
        for p in self.particles:
            IMP.core.XYZ(p).set_coordinates(
                t_inv.get_transformed(IMP.core.XYZ(p).get_coordinates()))
        print("scores:", score1, ":", score2)
        self.assertAlmostEqual(score1, score2, delta=0.055)


if __name__ == '__main__':
    IMP.test.main()
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
#   Copyright 2021-2022 Valory AG
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
# ------------------------------------------------------------------------------
"""Tests for valory/abci connection, tendermint_encoder module."""

# pylint: skip-file

from packages.valory.connections.abci.tendermint.abci.types_pb2 import (  # type: ignore
    ResponseListSnapshots,
)
from packages.valory.connections.abci.tendermint_encoder import (
    _TendermintProtocolEncoder,
)
from packages.valory.protocols.abci import AbciMessage
from packages.valory.protocols.abci.custom_types import Result, ResultType, SnapShots
from packages.valory.protocols.abci.custom_types import Snapshot as CustomSnapshot


class TestTendermintProtocolEncoder:
    """Test for the Tendermint protocol encoder."""

    def test_response_exception(self) -> None:
        """Test encoding of a response exception."""
        expected_error = "error"
        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_EXCEPTION,  # type: ignore
            error=expected_error,
        )
        message = _TendermintProtocolEncoder.response_exception(abci_message)
        assert message.exception.error == expected_error

    def test_response_echo(self) -> None:
        """Test encoding of a response echo."""
        expected_message = "message"
        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_ECHO,  # type: ignore
            message=expected_message,
        )
        message = _TendermintProtocolEncoder.response_echo(abci_message)
        assert message.echo.message == expected_message

    def test_response_set_option(self) -> None:
        """Test encoding of a response set-option."""
        expected_code = 0
        expected_log = "log"
        expected_info = "info"
        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_SET_OPTION,  # type: ignore
            code=expected_code,
            log=expected_log,
            info=expected_info,
        )
        message = _TendermintProtocolEncoder.response_set_option(abci_message)
        assert message.set_option.code == expected_code
        assert message.set_option.log == expected_log
        assert message.set_option.info == expected_info

    def test_response_list_snapshots(self) -> None:
        """Test encoding of a response list-snapshots."""
        snapshot = CustomSnapshot(0, 0, 0, b"", b"")
        snapshots = SnapShots([snapshot])

        # expected snapshots object
        list_snapshots = ResponseListSnapshots()
        snapshots_pb = [
            _TendermintProtocolEncoder._encode_snapshot(snapshot)
            for snapshot in snapshots.snapshots
        ]
        list_snapshots.snapshots.extend(snapshots_pb)

        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_LIST_SNAPSHOTS,  # type: ignore
            snapshots=snapshots,
        )
        message = _TendermintProtocolEncoder.response_list_snapshots(abci_message)
        assert message.list_snapshots == list_snapshots

    def METHOD_NAME(self) -> None:
        """Test encoding of a response offer-snapshot."""
        expected_result = Result(ResultType.ACCEPT)
        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_OFFER_SNAPSHOT,  # type: ignore
            result=expected_result,
        )
        message = _TendermintProtocolEncoder.response_offer_snapshot(abci_message)
        assert message.offer_snapshot.result == expected_result.result_type.value

    def test_response_load_snapshot_chunk(self) -> None:
        """Test encoding of a response load-snapshot-chunk."""
        expected_chunk = b"chunk"
        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_LOAD_SNAPSHOT_CHUNK,  # type: ignore
            chunk=expected_chunk,
        )
        message = _TendermintProtocolEncoder.response_load_snapshot_chunk(abci_message)
        assert message.load_snapshot_chunk.chunk == expected_chunk

    def test_response_apply_snapshot_chunk(self) -> None:
        """Test encoding of a response apply-snapshot-chunk."""
        result = Result(ResultType.ACCEPT)
        abci_message = AbciMessage(
            performative=AbciMessage.Performative.RESPONSE_APPLY_SNAPSHOT_CHUNK,  # type: ignore
            result=result,
            refetch_chunks=tuple(),
            reject_senders=tuple(),
        )
        message = _TendermintProtocolEncoder.response_apply_snapshot_chunk(abci_message)
        assert message.apply_snapshot_chunk.result == result.result_type.value
#!/usr/bin/env python
'Corresponds to a glyph, for analysis purposes, for GDL generation'
__url__ = 'https://github.com/silnrsi/pysilfont'
__copyright__ = 'Copyright (c) 2012 SIL International (https://www.sil.org)'
__license__ = 'Released under the MIT License (https://opensource.org/licenses/MIT)'

import re, traceback
from silfont.gdl.psnames import Name
from xml.etree.cElementTree import SubElement

# Convert from Graphite AP name to the standard name, eg upperM -> _upper
def gr_ap(txt) :
    if txt.endswith('M') :
        return "_" + txt[:-1]
    elif txt.endswith('S') :
        return txt[:-1]
    else :
        return txt

# Convert from standard AP name to the Graphite name, eg _upper -> upperM
def ap_gr(txt) :
    if txt.startswith('_') :
        return txt[1:] + 'M'
    else :
        return txt + 'S'


class Glyph(object) :

    isDia = False

    def __init__(self, name, gid = 0) :
        self.clear()
        self.setName(name)
        self.gdl = None
        self.gid = gid
        self.uid = ""        # this is a string!
        self.comment = ""
        self.isDia = False

    def clear(self) :
        self.anchors = {}
        self.classes = set()
        self.gdl_properties = {}
        self.properties = {}

    def setName(self, name) :
        self.psname = name
        self.name = next(self.parseNames())

    def setAnchor(self, name, x, y, t = None) :
        send = True
        if name in self.anchors :
            if x is None and y is None :
                del self.anchors[name]
                return True
            if x is None : x = self.anchors[name][0]
            if y is None : y = self.anchors[name][1]
            send = self.anchors[name] != (x, y)
        self.anchors[name] = (x, y)
        return send
        # if not name.startswith("_") and t != 'basemark' :
        #     self.isBase = True

    def parseNames(self) :
        if self.psname :
            for name in self.psname.split("/") :
                res = Name(name)
                yield res
        else :
            yield None

    def GDLName(self) :
        if self.gdl :
            return self.gdl
        elif self.name :
            return self.name.GDL()
        else :
            return None

    def setGDL(self, name) :
        self.gdl = name

    def readAP(self, elem, font) :
        self.uid = elem.get('UID', None)
        for p in elem.iterfind('property') :
            n = p.get('name')
            if n == 'GDLName' :
                self.setGDL(p.get('value'))
            elif n.startswith('GDL_') :
                self.gdl_properties[n[4:]] = p.get('value')
            else :
                self.properties[n] = p.get('value')
        for p in elem.iterfind('point') :
            l = p.find('location')
            self.setAnchor(ap_gr(p.get('type')), int(l.get('x', 0)), int(l.get('y', 0)))
        p = elem.find('note')
        if p is not None and p.text :
            self.comment = p.text
        if 'classes' in self.properties :
            for c in self.properties['classes'].split() :
                if c not in self.classes :
                    self.classes.add(c)
                    font.addGlyphClass(c, self, editable = True)

    def METHOD_NAME(self, elem, font, autoGdlFile) :
        e = SubElement(elem, 'glyph')
        if self.psname :
            e.set('PSName', self.psname)
        if self.uid :
            e.set('UID', self.uid)
        if self.gid is not None :
            e.set('GID', str(self.gid))
        ce = None
        if 'classes' in self.properties and self.properties['classes'].strip() :
            tempClasses = self.properties['classes']
            self.properties['classes'] = " ".join(font.filterAutoClasses(self.properties['classes'].split(), autoGdlFile))
        for k in sorted(self.anchors.keys()) :
            v = self.anchors[k]
            p = SubElement(e, 'point')
            p.set('type', gr_ap(k))
            p.text = "\n "
            l = SubElement(p, 'location')
            l.set('x', str(v[0]))
            l.set('y', str(v[1]))
            l.tail = "\n "
            if ce is not None : ce.tail = "\n "
            ce = p
        for k in sorted(self.gdl_properties.keys()) :
            if k == "*skipPasses*" : continue   # not set in GDL
            v = self.gdl_properties[k]
            if v :
                p = SubElement(e, 'property')
                p.set('name', 'GDL_' + k)
                p.set('value', v)
                if ce is not None : ce.tail = "\n "
                ce = p
        if self.gdl and (not self.name or self.gdl != self.name.GDL()) :
            p = SubElement(e, 'property')
            p.set('name', 'GDLName')
            p.set('value', self.GDLName())
            if ce is not None : ce.tail = "\n "
            ce = p
        for k in sorted(self.properties.keys()) :
            v = self.properties[k]
            if v :
                p = SubElement(e, 'property')
                p.set('name', k)
                p.set('value', v)
                if ce is not None : ce.tail = "\n "
                ce = p
        if self.comment :
            p = SubElement(e, 'note')
            p.text = self.comment
            if ce is not None : ce.tail = "\n "
            ce = p
        if 'classes' in self.properties and self.properties['classes'].strip() :
            self.properties['classes'] = tempClasses
        if ce is not None :
            ce.tail = "\n"
        e.text = "\n "
        e.tail = "\n"
        return e


def isMakeGDLSpecialClass(name) :
    # if re.match(r'^cn?(Takes)?.*?Dia$', name) : return True
    # if name.startswith('clig') : return True
    # if name.startswith('cno_') : return True
    if re.match(r'^\*GC\d+\*$', name) : return True   # auto-pseudo glyph with name = *GCXXXX*
    return False

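# A minimal usage sketch (not part of the source file above) for the AP-name
# converters. The import path silfont.gdl.glyph is an assumption about where
# this module lives; the gr_ap/ap_gr names come from the file itself.
from silfont.gdl.glyph import ap_gr, gr_ap  # hypothetical import path

assert gr_ap("upperM") == "_upper"   # Graphite mark AP -> standard name
assert gr_ap("upperS") == "upper"    # Graphite stationary AP -> standard name
assert ap_gr("_upper") == "upperM"   # round-trips back to the Graphite name
assert ap_gr("upper") == "upperS"
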
from socket import gaierror
from unittest import TestCase
from unittest.mock import patch

from pcs.common.corosync_conf import (
    CorosyncNodeAddressDto,
    CorosyncNodeDto,
)
from pcs.common.types import CorosyncNodeAddressType
from pcs.lib.corosync.node import (
    CorosyncNode,
    CorosyncNodeAddress,
    get_address_type,
)


class AddrsPlain(TestCase):
    @staticmethod
    def _fixture_no_addrs():
        return CorosyncNode("node1", [], "1")

    @staticmethod
    def _fixture_one_addr():
        return CorosyncNode(
            "node1", [CorosyncNodeAddress("10.0.0.1", "1")], "1"
        )

    @staticmethod
    def _fixture_several_addrs():
        return CorosyncNode(
            "node1",
            [
                CorosyncNodeAddress("10.0.0.0", "0"),
                CorosyncNodeAddress("10.0.0.1", "1"),
                CorosyncNodeAddress("10.0.0.4", "4"),
                CorosyncNodeAddress("10.0.0.3", "3"),
            ],
            "1",
        )

    def test_no_addrs_set_get_one(self):
        node = self._fixture_no_addrs()
        self.assertEqual(None, node.addr_plain_for_link("0"))

    def test_no_addrs_set_get_all(self):
        node = self._fixture_no_addrs()
        self.assertEqual([], node.addrs_plain())

    def test_no_addrs_set_get_all_except(self):
        node = self._fixture_no_addrs()
        self.assertEqual([], node.addrs_plain(except_link="0"))

    def test_one_addr_set_get_it(self):
        node = self._fixture_one_addr()
        self.assertEqual("10.0.0.1", node.addr_plain_for_link("1"))

    def test_one_addr_set_get_another(self):
        node = self._fixture_one_addr()
        self.assertEqual(None, node.addr_plain_for_link(1))
        self.assertEqual(None, node.addr_plain_for_link("2"))

    def test_one_addr_set_get_all(self):
        node = self._fixture_one_addr()
        self.assertEqual(["10.0.0.1"], node.addrs_plain())

    def test_one_addr_set_get_all_except_it(self):
        node = self._fixture_one_addr()
        self.assertEqual([], node.addrs_plain(except_link="1"))

    def test_one_addr_set_get_all_except_another(self):
        node = self._fixture_one_addr()
        self.assertEqual(["10.0.0.1"], node.addrs_plain(except_link=1))
        self.assertEqual(["10.0.0.1"], node.addrs_plain(except_link="2"))

    def METHOD_NAME(self):
        node = self._fixture_several_addrs()
        self.assertEqual("10.0.0.1", node.addr_plain_for_link("1"))

    def test_several_addrs_set_get_another(self):
        node = self._fixture_several_addrs()
        self.assertEqual(None, node.addr_plain_for_link(1))
        self.assertEqual(None, node.addr_plain_for_link("2"))

    def test_several_addrs_set_get_all(self):
        node = self._fixture_several_addrs()
        self.assertEqual(
            ["10.0.0.0", "10.0.0.1", "10.0.0.4", "10.0.0.3"], node.addrs_plain()
        )

    def test_several_addrs_set_get_all_except_one(self):
        node = self._fixture_several_addrs()
        self.assertEqual(
            ["10.0.0.0", "10.0.0.4", "10.0.0.3"],
            node.addrs_plain(except_link="1"),
        )

    def test_several_addrs_set_get_all_except_another(self):
        node = self._fixture_several_addrs()
        self.assertEqual(
            ["10.0.0.0", "10.0.0.1", "10.0.0.4", "10.0.0.3"],
            node.addrs_plain(except_link=1),
        )
        self.assertEqual(
            ["10.0.0.0", "10.0.0.1", "10.0.0.4", "10.0.0.3"],
            node.addrs_plain(except_link="2"),
        )


class CorosyncNodeToDto(TestCase):
    def test_no_addrs(self):
        self.assertEqual(
            CorosyncNode("node1", [], "1").to_dto(),
            CorosyncNodeDto("node1", "1", []),
        )

    def test_all_addr_types(self):
        self.assertEqual(
            CorosyncNode(
                "node1",
                [
                    CorosyncNodeAddress("10.0.0.1", "0"),
                    CorosyncNodeAddress("node1.domain", "1"),
                    CorosyncNodeAddress("fe80::5054:ff:fe81:1", "2"),
                ],
                "1",
            ).to_dto(),
            CorosyncNodeDto(
                "node1",
                "1",
                [
                    CorosyncNodeAddressDto(
                        "10.0.0.1", "0", CorosyncNodeAddressType.IPV4
                    ),
                    CorosyncNodeAddressDto(
                        "node1.domain", "1", CorosyncNodeAddressType.FQDN
                    ),
                    CorosyncNodeAddressDto(
                        "fe80::5054:ff:fe81:1",
                        "2",
                        CorosyncNodeAddressType.IPV6,
                    ),
                ],
            ),
        )


class GetAddrType(TestCase):
    def assert_call(self, expected_type, addr, resolve):
        self.assertEqual(expected_type, get_address_type(addr, resolve=resolve))

    def test_ipv4_resolve_false(self):
        self.assert_call(CorosyncNodeAddressType.IPV4, "10.0.0.1", False)

    def test_ipv6_resolve_false(self):
        self.assert_call(
            CorosyncNodeAddressType.IPV6, "fe80::5054:ff:fe81:1", False
        )

    def test_fqdn_resolve_false(self):
        self.assert_call(CorosyncNodeAddressType.FQDN, "node1.domain", False)

    def test_ipv4_resolve_true(self):
        self.assert_call(CorosyncNodeAddressType.IPV4, "10.0.0.1", True)

    def test_ipv6_resolve_true(self):
        self.assert_call(
            CorosyncNodeAddressType.IPV6, "fe80::5054:ff:fe81:1", True
        )

    @patch("pcs.lib.corosync.node.socket.getaddrinfo")
    def test_fqdn_resolvable(self, mock_getaddrinfo):
        mock_getaddrinfo.return_value = None
        self.assertEqual(
            CorosyncNodeAddressType.FQDN,
            get_address_type("node1.domain", resolve=True),
        )

    @patch("pcs.lib.corosync.node.socket.getaddrinfo")
    def test_fqdn_unresolvable(self, mock_getaddrinfo):
        mock_getaddrinfo.side_effect = gaierror()
        self.assertEqual(
            CorosyncNodeAddressType.UNRESOLVABLE,
            get_address_type("node1.domain", resolve=True),
        )

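# A small illustrative sketch (not part of the test module above) of the
# function under test; the import paths are taken from the imports at the top
# of the file. With resolve=False no DNS lookup happens, so a hostname is
# classified as FQDN without checking that it actually resolves.
from pcs.common.types import CorosyncNodeAddressType
from pcs.lib.corosync.node import get_address_type

assert get_address_type("10.0.0.1", resolve=False) == CorosyncNodeAddressType.IPV4
assert get_address_type("fe80::5054:ff:fe81:1", resolve=False) == CorosyncNodeAddressType.IPV6
assert get_address_type("node1.domain", resolve=False) == CorosyncNodeAddressType.FQDN
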
from unittest import mock

import pytest
from aioresponses import aioresponses

from ai.backend.client.config import API_VERSION
from ai.backend.client.session import Session
from ai.backend.testutils.mock import AsyncMock


def build_url(config, path: str):
    base_url = config.endpoint.path.rstrip("/")
    query_path = path.lstrip("/") if len(path) > 0 else ""
    path = "{0}/{1}".format(base_url, query_path)
    canonical_url = config.endpoint.with_path(path)
    return canonical_url


@pytest.fixture(scope="module", autouse=True)
def api_version():
    mock_nego_func = AsyncMock()
    mock_nego_func.return_value = API_VERSION
    with mock.patch("ai.backend.client.session._negotiate_api_version", mock_nego_func):
        yield


def test_create_vfolder():
    with Session() as session, aioresponses() as m:
        payload = {
            "id": "fake-vfolder-id",
            "name": "fake-vfolder-name",
            "host": "local",
        }
        m.post(build_url(session.config, "/folders"), status=201, payload=payload)
        resp = session.VFolder.create("fake-vfolder-name")
        assert resp == payload


def test_create_vfolder_in_other_host():
    with Session() as session, aioresponses() as m:
        payload = {
            "id": "fake-vfolder-id",
            "name": "fake-vfolder-name",
            "host": "fake-vfolder-host",
        }
        m.post(build_url(session.config, "/folders"), status=201, payload=payload)
        resp = session.VFolder.create("fake-vfolder-name", "fake-vfolder-host")
        assert resp == payload


def test_list_vfolders():
    with Session() as session, aioresponses() as m:
        payload = [
            {
                "name": "fake-vfolder1",
                "id": "fake-vfolder1-id",
                "host": "fake-vfolder1-host",
                "is_owner": True,
                "permissions": "wd",
            },
            {
                "name": "fake-vfolder2",
                "id": "fake-vfolder2-id",
                "host": "fake-vfolder2-host",
                "is_owner": True,
                "permissions": "wd",
            },
        ]
        m.get(build_url(session.config, "/folders"), status=200, payload=payload)
        resp = session.VFolder.list()
        assert resp == payload


def test_delete_vfolder():
    with Session() as session, aioresponses() as m:
        vfolder_name = "fake-vfolder-name"
        m.delete(build_url(session.config, "/folders/{}".format(vfolder_name)), status=204)
        resp = session.VFolder(vfolder_name).delete()
        assert resp == {}


def test_vfolder_get_info():
    with Session() as session, aioresponses() as m:
        vfolder_name = "fake-vfolder-name"
        payload = {
            "name": vfolder_name,
            "id": "fake-vfolder-id",
            "host": "fake-vfolder-host",
            "numFiles": 5,
            "created": "2018-06-02 09:04:15.585917+00:00",
            "is_owner": True,
            "permission": "wd",
        }
        m.get(
            build_url(session.config, "/folders/{}".format(vfolder_name)),
            status=200,
            payload=payload,
        )
        resp = session.VFolder(vfolder_name).info()
        assert resp == payload


def test_vfolder_delete_files():
    with Session() as session, aioresponses() as m:
        vfolder_name = "fake-vfolder-name"
        files = ["fake-file1", "fake-file2"]
        m.delete(
            build_url(session.config, "/folders/{}/delete-files".format(vfolder_name)),
            status=200,
            payload={},
        )
        resp = session.VFolder(vfolder_name).delete_files(files)
        assert resp == "{}"


def test_vfolder_list_files():
    with Session() as session, aioresponses() as m:
        vfolder_name = "fake-vfolder-name"
        payload = {
            "files": [
                {
                    "mode": "-rw-r--r--",
                    "size": 4751244,
                    "ctime": 1528277299.2744732,
                    "mtime": 1528277299.2744732,
                    "atime": 1528277300.7658687,
                    "filename": "bigtxt.txt",
                },
                {
                    "mode": "-rw-r--r--",
                    "size": 200000,
                    "ctime": 1528333257.6576185,
                    "mtime": 1528288069.625786,
                    "atime": 1528332829.692922,
                    "filename": "200000",
                },
            ],
            "folder_path": "/mnt/local/1f6bd27fde1248cabfb50306ea83fc0a",
        }
        m.get(
            build_url(session.config, "/folders/{}/files".format(vfolder_name)),
            status=200,
            payload=payload,
        )
        resp = session.VFolder(vfolder_name).list_files(".")
        assert resp == payload


def test_vfolder_invite():
    with Session() as session, aioresponses() as m:
        vfolder_name = "fake-vfolder-name"
        user_ids = ["[email protected]", "[email protected]"]
        payload = {"invited_ids": user_ids}
        m.post(
            build_url(session.config, "/folders/{}/invite".format(vfolder_name)),
            status=201,
            payload=payload,
        )
        resp = session.VFolder(vfolder_name).invite("rw", user_ids)
        assert resp == payload


def test_vfolder_invitations():
    with Session() as session, aioresponses() as m:
        payload = {
            "invitations": [
                {
                    "id": "fake-invitation-id",
                    "inviter": "[email protected]",
                    "perm": "ro",
                    "vfolder_id": "fake-vfolder-id",
                },
            ],
        }
        m.get(build_url(session.config, "/folders/invitations/list"), status=200, payload=payload)
        resp = session.VFolder.invitations()
        assert resp == payload


def test_vfolder_accept_invitation():
    with Session() as session, aioresponses() as m:
        payload = {
            "msg": "User [email protected] now can access vfolder fake-vfolder-id",
        }
        m.post(
            build_url(session.config, "/folders/invitations/accept"), status=200, payload=payload
        )
        resp = session.VFolder.accept_invitation("inv-id")
        assert resp == payload


def METHOD_NAME():
    with Session() as session, aioresponses() as m:
        payload = {"msg": "Vfolder invitation is deleted: fake-inv-id."}
        m.delete(
            build_url(session.config, "/folders/invitations/delete"), status=200, payload=payload
        )
        resp = session.VFolder.delete_invitation("inv-id")
        assert resp == payload


def test_vfolder_clone():
    with Session() as session, aioresponses() as m:
        source_vfolder_name = "fake-source-vfolder-name"
        target_vfolder_name = "fake-target-vfolder-name"
        payload = {
            "target_name": target_vfolder_name,
            "target_host": "local",
            "permission": "rw",
            "usage_mode": "general",
        }
        m.post(
            build_url(session.config, "/folders/{}/clone".format(source_vfolder_name)),
            status=201,
            payload=payload,
        )
        resp = session.VFolder(source_vfolder_name).clone(target_vfolder_name)
        assert resp == payload

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkr_kvstore.endpoint import endpoint_data


class DescribePriceRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'R-kvstore', '2015-01-01', 'DescribePrice','redisa')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_NodeType(self):  # String
        return self.get_query_params().get('NodeType')

    def set_NodeType(self, NodeType):  # String
        self.add_query_param('NodeType', NodeType)

    def get_Instances(self):  # String
        return self.get_query_params().get('Instances')

    def set_Instances(self, Instances):  # String
        self.add_query_param('Instances', Instances)

    def get_CouponNo(self):  # String
        return self.get_query_params().get('CouponNo')

    def set_CouponNo(self, CouponNo):  # String
        self.add_query_param('CouponNo', CouponNo)

    def get_InstanceClass(self):  # String
        return self.get_query_params().get('InstanceClass')

    def set_InstanceClass(self, InstanceClass):  # String
        self.add_query_param('InstanceClass', InstanceClass)

    def get_Capacity(self):  # Long
        return self.get_query_params().get('Capacity')

    def set_Capacity(self, Capacity):  # Long
        self.add_query_param('Capacity', Capacity)

    def get_SecurityToken(self):  # String
        return self.get_query_params().get('SecurityToken')

    def set_SecurityToken(self, SecurityToken):  # String
        self.add_query_param('SecurityToken', SecurityToken)

    def get_BusinessInfo(self):  # String
        return self.get_query_params().get('BusinessInfo')

    def set_BusinessInfo(self, BusinessInfo):  # String
        self.add_query_param('BusinessInfo', BusinessInfo)

    def get_Period(self):  # Long
        return self.get_query_params().get('Period')

    def set_Period(self, Period):  # Long
        self.add_query_param('Period', Period)

    def get_Quantity(self):  # Long
        return self.get_query_params().get('Quantity')

    def set_Quantity(self, Quantity):  # Long
        self.add_query_param('Quantity', Quantity)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OrderParamOut(self):  # String
        return self.get_query_params().get('OrderParamOut')

    def set_OrderParamOut(self, OrderParamOut):  # String
        self.add_query_param('OrderParamOut', OrderParamOut)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_InstanceId(self):  # String
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self, InstanceId):  # String
        self.add_query_param('InstanceId', InstanceId)

    def get_ZoneId(self):  # String
        return self.get_query_params().get('ZoneId')

    def METHOD_NAME(self, ZoneId):  # String
        self.add_query_param('ZoneId', ZoneId)

    def get_ChargeType(self):  # String
        return self.get_query_params().get('ChargeType')

    def set_ChargeType(self, ChargeType):  # String
        self.add_query_param('ChargeType', ChargeType)

    def get_ForceUpgrade(self):  # Boolean
        return self.get_query_params().get('ForceUpgrade')

    def set_ForceUpgrade(self, ForceUpgrade):  # Boolean
        self.add_query_param('ForceUpgrade', ForceUpgrade)

    def get_OrderType(self):  # String
        return self.get_query_params().get('OrderType')

    def set_OrderType(self, OrderType):  # String
        self.add_query_param('OrderType', OrderType)

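# A hedged usage sketch (not from the source): issuing DescribePrice through
# the core SDK client. The request import path follows the usual layout of
# generated aliyun SDK packages but is an assumption, and the credentials,
# region, and parameter values below are placeholders.
from aliyunsdkcore.client import AcsClient
from aliyunsdkr_kvstore.request.v20150101.DescribePriceRequest import DescribePriceRequest

client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
request = DescribePriceRequest()
request.set_InstanceClass("redis.master.small.default")  # placeholder instance class
request.set_Period(1)
request.set_Quantity(1)
request.set_ChargeType("PrePaid")
# do_action_with_exception sends the signed request and raises on API errors
response = client.do_action_with_exception(request)
print(response)
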
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
import os
from logging import getLogger
from os.path import isdir, join
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import patch

import pytest

from conda.auxlib.collection import AttrDict
from conda.base.constants import PREFIX_MAGIC_FILE
from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context
from conda.common.compat import on_win
from conda.common.io import env_var
from conda.common.path import expand, paths_equal
from conda.core.envs_manager import (
    _clean_environments_txt,
    get_user_environments_txt_file,
    list_all_known_prefixes,
    register_env,
    unregister_env,
)
from conda.gateways.disk import mkdir_p
from conda.gateways.disk.read import yield_lines
from conda.gateways.disk.update import touch

log = getLogger(__name__)


def test_register_unregister_location_env(tmp_path: Path):
    user_environments_txt_file = get_user_environments_txt_file()
    if (
        not os.path.exists(user_environments_txt_file)
        or user_environments_txt_file == os.devnull
    ):
        pytest.skip(
            f"user environments.txt file {user_environments_txt_file} does not exist"
        )

    gascon_location = join(tmp_path, "gascon")
    touch(join(gascon_location, PREFIX_MAGIC_FILE), mkdir=True)
    assert gascon_location not in list_all_known_prefixes()

    touch(user_environments_txt_file, mkdir=True, sudo_safe=True)
    with env_var(
        "CONDA_REGISTER_ENVS",
        "true",
        stack_callback=conda_tests_ctxt_mgmt_def_pol,
    ):
        register_env(gascon_location)
    assert gascon_location in yield_lines(user_environments_txt_file)
    assert (
        len(
            tuple(
                x
                for x in yield_lines(user_environments_txt_file)
                if paths_equal(gascon_location, x)
            )
        )
        == 1
    )

    register_env(gascon_location)  # should be completely idempotent
    assert (
        len(
            tuple(
                x for x in yield_lines(user_environments_txt_file) if x == gascon_location
            )
        )
        == 1
    )

    unregister_env(gascon_location)
    assert gascon_location not in list_all_known_prefixes()
    unregister_env(gascon_location)  # should be idempotent
    assert gascon_location not in list_all_known_prefixes()


def test_prefix_cli_flag(tmp_path: Path):
    envs_dirs = (
        join(tmp_path, "first-envs-dir"),
        join(tmp_path, "seconds-envs-dir"),
    )
    with env_var(
        "CONDA_ENVS_DIRS",
        os.pathsep.join(envs_dirs),
        stack_callback=conda_tests_ctxt_mgmt_def_pol,
    ):
        # even if prefix doesn't exist, it can be a target prefix
        reset_context((), argparse_args=AttrDict(prefix="./blarg", func="create"))
        target_prefix = join(os.getcwd(), "blarg")
        assert context.target_prefix == target_prefix
        assert not isdir(target_prefix)


def METHOD_NAME(tmp_path: Path):
    mkdir_p(join(tmp_path, "conda-meta"))
    touch(join(tmp_path, "conda-meta", "history"))
    doesnt_exist = join(tmp_path, "blarg")
    environments_txt_path = join(tmp_path, "environments.txt")
    with open(environments_txt_path, "w") as fh:
        fh.write(f"{tmp_path}\n")
        fh.write(f"{doesnt_exist}\n")
    cleaned_1 = _clean_environments_txt(environments_txt_path)
    assert cleaned_1 == (str(tmp_path),)
    with patch("conda.core.envs_manager._rewrite_environments_txt") as _rewrite_patch:
        cleaned_2 = _clean_environments_txt(environments_txt_path)
        assert cleaned_2 == (str(tmp_path),)
        assert _rewrite_patch.call_count == 0


@patch("conda.core.envs_manager.context")
@patch("conda.core.envs_manager.get_user_environments_txt_file")
@patch("conda.core.envs_manager._clean_environments_txt")
def test_list_all_known_prefixes_with_permission_error(
    mock_clean_env, mock_get_user_env, mock_context, tmp_path
):
    # Mock context
    myenv_dir = tmp_path / "envs"
    myenv_dir.mkdir()
    mock_context.envs_dirs = str(myenv_dir)
    mock_context.root_prefix = "root_prefix"
    # Mock get_user_environments_txt_file to return a file
    env_txt_file = tmp_path / "environment.txt"
    touch(env_txt_file)
    mock_get_user_env.return_value = env_txt_file
    # Mock _clean_environments_txt to raise PermissionError
    mock_clean_env.side_effect = PermissionError()
    all_env_paths = list_all_known_prefixes()
    # On Windows, all_env_paths can contain more paths (like '\\Miniconda')
    assert "root_prefix" in all_env_paths


@pytest.mark.skipif(on_win, reason="test is invalid on windows")
@patch("conda.core.envs_manager.context")
@patch("conda.core.envs_manager._clean_environments_txt")
@patch("pwd.getpwall")
@patch("conda.core.envs_manager.is_admin")
def test_list_all_known_prefixes_with_none_values_error(
    mock_is_admin, mock_getpwall, mock_clean_env, mock_context, tmp_path
):
    """
    Regression test for a bug first identified in this issue:
    https://github.com/conda/conda/issues/12063

    Tests to make sure that `None` values are filtered out of the
    `search_dirs` variable in the `list_all_known_prefixes` function.
    """
    mock_is_admin.return_value = True
    mock_getpwall.return_value = [
        SimpleNamespace(pw_dir=expand("~")),
        SimpleNamespace(pw_dir=None),
    ]
    mock_clean_env.return_value = []
    mock_env_dir = tmp_path / "envs"
    mock_env_dir.mkdir()
    mock_context.envs_dirs = str(mock_env_dir)
    mock_context.root_prefix = str(tmp_path)
    results = list_all_known_prefixes()
    assert results == [mock_context.root_prefix]

"""These are some benchmark functions for MDP. """ from __future__ import print_function from builtins import str from builtins import range import mdp #from mdp.utils import symeig from mdp.utils import matmult as mult numx = mdp.numx numx_rand = mdp.numx_rand numx_fft = mdp.numx_fft ####### benchmark function def matmult_c_MDP_benchmark(dim): """ This benchmark multiplies two contiguous matrices using the MDP internal matrix multiplication routine. First argument matrix dimensionality""" a = numx_rand.random((dim,dim)) b = numx_rand.random((dim,dim)) mult(a,b) def matmult_c_scipy_benchmark(dim): """ This benchmark multiplies two contiguous matrices using the scipy internal matrix multiplication routine. First argument matrix dimensionality""" a = numx_rand.random((dim,dim)) b = numx_rand.random((dim,dim)) numx.dot(a,b) def matmult_n_MDP_benchmark(dim): """ This benchmark multiplies two non-contiguous matrices using the MDP internal matrix multiplication routine. First argument matrix dimensionality""" a = numx_rand.random((dim,dim)).T b = numx_rand.random((dim,dim)).T mult(a,b) def matmult_n_scipy_benchmark(dim): """ This benchmark multiplies two non-contiguous matrices using the scipy internal matrix multiplication routine. First argument matrix dimensionality""" a = numx_rand.random((dim,dim)).T b = numx_rand.random((dim,dim)).T numx.dot(a,b) def matmult_cn_MDP_benchmark(dim): """ This benchmark multiplies a contiguous matrix with a non-contiguous matrix using the MDP internal matrix multiplication routine. First argument matrix dimensionality""" a = numx_rand.random((dim,dim)).T b = numx_rand.random((dim,dim)) mult(a,b) def METHOD_NAME(dim): """ This benchmark multiplies a contiguous matrix with a non-contiguous matrix using the scipy internal matrix multiplication routine. First argument matrix dimensionality""" a = numx_rand.random((dim,dim)).T b = numx_rand.random((dim,dim)) numx.dot(a,b) def quadratic_expansion_benchmark(dim, len, times): """ This benchmark expands random data of shape (len, dim) 'times' times. Arguments: (dim,len,times).""" a = numx_rand.random((len,dim)) qnode = mdp.nodes.QuadraticExpansionNode() for i in range(times): qnode(a) def polynomial_expansion_benchmark(dim, len, degree, times): """ This benchmark expands random data of shape (len, dim) 'times' times in the space of polynomials of degree 'degree'. Arguments: (dim,len,degree,times).""" numx_rand.seed(4253529) a = numx_rand.random((len,dim)) pnode = mdp.nodes.PolynomialExpansionNode(degree) for i in range(times): pnode(a) # ISFA benchmark def _tobias_mix(src): mix = src.copy() mix[:,0]=(src[:,1]+3*src[:,0]+6)*numx.cos(1.5*numx.pi*src[:,0]) mix[:,1]=(src[:,1]+3*src[:,0]+6)*numx.sin(1.5*numx.pi*src[:,0]) return mix def _get_random_slow_sources(nsrc, distr_fun): # nsrc: number of sources # distr_fun: random numbers function src = distr_fun(size=(50000, nsrc)) fsrc = numx_fft.rfft(src, axis=0) # enforce different time scales for i in range(nsrc): fsrc[5000+(i+1)*1000:,i] = 0. 
src = numx_fft.irfft(fsrc,axis=0) return src def isfa_spiral_benchmark(): """ Apply ISFA to twisted data.""" numx_rand.seed(116599099) # create independent sources src = _get_random_slow_sources(2, numx_rand.laplace) # subtract mean and rescale between -1 and 1 src -= src.mean(axis=0) src /= abs(src).max() # apply nonlinear "twist" transformation exp_src = _tobias_mix(src) # train flow = mdp.Flow([mdp.nodes.PolynomialExpansionNode(5), mdp.nodes.SFANode(), mdp.nodes.ISFANode(lags=30, whitened=False, sfa_ica_coeff=[1.,300.], eps_contrast=1e-5, output_dim=2, verbose=False)]) flow.train(exp_src) def sfa_benchmark(): """ Apply SFA to twisted data.""" numx_rand.seed(424507) # create independent sources nsrc = 15 src = _get_random_slow_sources(nsrc, numx_rand.normal) src = src[:5000,:] src = mult(src, numx_rand.uniform(size=(nsrc, nsrc))) \ + numx_rand.uniform(size=nsrc) # train flow = mdp.Flow([mdp.nodes.PolynomialExpansionNode(3), mdp.nodes.PCANode(output_dim = 100), mdp.nodes.SFANode(output_dim = 30)]) #src = src.reshape(1000,5,nsrc) flow.train([None, [src], [src]]) #### benchmark tools # function used to measure time import time TIMEFUNC = time.time def timeit(func,*args,**kwargs): """Return function execution time in 1/100ths of a second.""" tstart = TIMEFUNC() func(*args,**kwargs) return (TIMEFUNC()-tstart)*100. def _random_seed(): import sys seed = int(numx_rand.randint(2**31-1)) numx_rand.seed(seed) sys.stderr.write("Random Seed: " + str(seed)+'\n') def run_benchmarks(bench_funcs, time_digits=15): results_str = '| %%s | %%%d.2f |' % time_digits label_str = '| %%s | %s |' % 'Time (sec/100)'.center(time_digits) tstart = TIMEFUNC() # loop over all benchmarks functions for func, args_list in bench_funcs: # number of combinations of arguments(cases) ncases = len(args_list) funcname = func.__name__[:-10] # loop over all cases for i in range(ncases): args = args_list[i] # format description string descr = funcname + str(tuple(args)) if i==0: # print summary table header descrlen = len(descr)+6 results_strlen = time_digits+descrlen+7 print('\nTiming results (%s, %d cases):' % (funcname, ncases)) print(func.__doc__) print('+'+'-'*(results_strlen-2)+'+') print(label_str % 'Description'.center(descrlen)) print('+'+'-'*(results_strlen-2)+'+') # execute function t = timeit(func, *args) # print summary table entry print(results_str % (descr.center(descrlen), t)) # print summary table tail print('+'+'-'*(results_strlen-2)+'+') print('\nTotal running time:', (TIMEFUNC()-tstart)*100.) ####### /benchmark function POLY_EXP_ARGS = [(2**i, 100, j, 200) for j in range(2,5) for i in range(2,4)] #if mdp.numx_description in ['symeig', 'scipy', 'numpy']: # MUL_MTX_DIMS = [[2**i] for i in xrange(4,11)] # # list of (benchmark function, list of arguments) # BENCH_FUNCS = [(matmult_c_MDP_benchmark, MUL_MTX_DIMS), # (matmult_c_scipy_benchmark, MUL_MTX_DIMS), # (matmult_n_MDP_benchmark, MUL_MTX_DIMS), # (matmult_n_scipy_benchmark, MUL_MTX_DIMS), # (matmult_cn_MDP_benchmark, MUL_MTX_DIMS), # (matmult_cn_scipy_benchmark, MUL_MTX_DIMS), # (polynomial_expansion_benchmark, POLY_EXP_ARGS)] #else: # BENCH_FUNCS = [(polynomial_expansion_benchmark, POLY_EXP_ARGS)] BENCH_FUNCS = [(polynomial_expansion_benchmark, POLY_EXP_ARGS), (isfa_spiral_benchmark, [[]]), (sfa_benchmark, [[]])] def get_benchmarks(): return BENCH_FUNCS if __name__ == "__main__": print("Running benchmarks: ") run_benchmarks(get_benchmarks())
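# A minimal sketch (not part of the source above) of the timing helper:
# timeit() runs the callable once with the given arguments and reports the
# elapsed wall-clock time in 1/100ths of a second. Assumes the module above
# is in scope so that timeit is defined.
def noop_benchmark():
    pass  # deliberately does nothing; measures call overhead only

elapsed = timeit(noop_benchmark)
print("noop took %.4f (sec/100)" % elapsed)
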
# Copyright (c) 2017-2022 The Molecular Sciences Software Institute, Virginia Tech
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
#    contributors may be used to endorse or promote products derived
#    from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
'''
Conversion of basis sets to cfour/aces2/genbas format
'''

import math
from .. import lut, manip, sort, printing


def _cfour_exp(e):
    '''Formats an exponent for CFour'''
    return e.replace('E', 'D') + ' '


def _cfour_coef(c):
    '''Formats a coefficient for CFour'''
    return c.replace('E', 'D') + ' '


def _aces_exp(e):
    '''Formats an exponent for AcesII'''
    e = float(e)

    # Some basis sets have negative exponents???
    mag = int(math.log(abs(e), 10))
    mag = max(mag, 1)

    # Make room for the negative sign
    if e < 0.0:
        mag += 1

    # Number of decimal places to show
    ndec = min(7, 14 - 2 - mag)
    fmtstr = '{{:14.{}f}}'.format(ndec)
    s = fmtstr.format(e)

    # Trim a single trailing zero if there is one
    # and our string takes up all 14 characters
    if s[0] != ' ' and s[-1] == '0':
        s = ' ' + s[:-1]
    return s


def _aces_coef(c):
    '''Formats a coefficient for AcesII'''
    c = float(c)
    return '{:10.7f} '.format(c)


def _print_columns(data, ncol):
    s = ''
    for i in range(0, len(data), ncol):
        s += ''.join(data[i:i + ncol]) + '\n'
    return s


def _write_genbas_internal(basis, exp_formatter, coef_formatter):
    # Uncontract all, then make general
    basis = manip.make_general(basis, False, True)
    basis = sort.sort_basis(basis, False)

    # Elements for which we have electron basis
    electron_elements = [k for k, v in basis['elements'].items() if 'electron_shells' in v]

    # Elements for which we have ECP
    ecp_elements = [k for k, v in basis['elements'].items() if 'ecp_potentials' in v]

    s = '\n'

    if electron_elements:
        # Electron Basis
        for z in electron_elements:
            data = basis['elements'][z]
            sym = lut.element_sym_from_Z(z).upper()
            nshell = len(data['electron_shells'])

            s += '{}:{}\n'.format(sym, basis['name'])
            s += basis['description'] + '\n'
            s += '\n'
            s += '{:>3}\n'.format(nshell)

            s_am = ''
            s_ngen = ''
            s_nprim = ''
            for sh in data['electron_shells']:
                s_am += '{:>5}'.format(sh['angular_momentum'][0])
                s_ngen += '{:>5}'.format(len(sh['coefficients']))
                s_nprim += '{:>5}'.format(len(sh['exponents']))

            s += s_am + '\n'
            s += s_ngen + '\n'
            s += s_nprim + '\n'
            s += '\n'

            for shell in data['electron_shells']:
                exponents = [exp_formatter(x) for x in shell['exponents']]
                coefficients = [[coef_formatter(x) for x in y] for y in shell['coefficients']]
                coefficients = list(map(list, zip(*coefficients)))

                s += _print_columns(exponents, 5) + '\n'
                for c in coefficients:
                    s += _print_columns(c, 7)
                s += '\n'

    # Write out ECP
    if ecp_elements:
        s += '\n\n! Effective core Potentials\n'

        for z in ecp_elements:
            data = basis['elements'][z]
            sym = lut.element_sym_from_Z(z).upper()
            max_ecp_am = max([x['angular_momentum'][0] for x in data['ecp_potentials']])
            max_ecp_amchar = lut.amint_to_char([max_ecp_am]).lower()

            # Sort lowest->highest, then put the highest at the beginning
            ecp_list = sorted(data['ecp_potentials'], key=lambda x: x['angular_momentum'])
            ecp_list.insert(0, ecp_list.pop())

            s += '*\n'
            s += '{}:{}\n'.format(sym, basis['name'])
            s += '# ' + basis['description'] + '\n'
            s += '*\n'
            s += ' NCORE = {} LMAX = {}\n'.format(data['ecp_electrons'], max_ecp_am)

            for pot in ecp_list:
                rexponents = pot['r_exponents']
                gexponents = pot['gaussian_exponents']
                coefficients = pot['coefficients']
                am = pot['angular_momentum']
                amchar = lut.amint_to_char(am).lower()

                if am[0] == max_ecp_am:
                    s += '{}\n'.format(amchar)
                else:
                    s += '{}-{}\n'.format(amchar, max_ecp_amchar)

                point_places = [6, 18, 25]
                s += printing.write_matrix([*coefficients, rexponents, gexponents], point_places)
                #for p in range(len(rexponents)):
                #    s += '{} {} {};\n'.format(gexponents[p], rexponents[p], coefficients[0][p])
            s += '*\n'

    return s


def METHOD_NAME(basis):
    '''Converts a basis set to cfour
    '''

    # March 2019
    # Format determined from http://slater.chemie.uni-mainz.de/cfour/index.php?n=Main.NewFormatOfAnEntryInTheGENBASFile
    return _write_genbas_internal(basis, _cfour_exp, _cfour_coef)


def write_aces2(basis):
    '''Converts a basis set to aces2
    '''

    # March 2019
    # Format determined from http://slater.chemie.uni-mainz.de/cfour/index.php?n=Main.OldFormatOfAnEntryInTheGENBASFile
    return _write_genbas_internal(basis, _aces_exp, _aces_coef)

import codecs
import logging
import urllib.parse
from abc import ABC, abstractmethod
from io import SEEK_END, SEEK_SET
from pathlib import Path
from typing import BinaryIO, Callable, List, Optional, Tuple

import charset_normalizer
from charset_normalizer import CharsetMatch

from ggshield.utils.git_shell import Filemode

logger = logging.getLogger(__name__)

# Our worst encoding (UTF-32) would take 4 bytes to encode ASCII, where UTF-8 would take
# only 1. If the file is longer than byte_size / UTF8_TO_WORSE_OTHER_ENCODING_RATIO, no
# need to look into it: it's too big.
UTF8_TO_WORSE_OTHER_ENCODING_RATIO = 4


class DecodeError(Exception):
    """
    Raised when a Scannable cannot determine the encoding of its content.

    Similar to UnicodeDecodeError, but easier to instantiate.
    """

    pass


class Scannable(ABC):
    """Base class for content that can be scanned by GGShield"""

    def __init__(self, filemode: Filemode = Filemode.FILE):
        self.filemode = filemode

    @property
    @abstractmethod
    def url(self) -> str:
        """Act as a unique identifier for the Scannable. May use custom protocols if
        required."""
        raise NotImplementedError

    @property
    @abstractmethod
    def filename(self) -> str:
        """To avoid breakage with the rest of the code base, implementations currently
        return the URL or path of the instance for now, but it should really return
        just the filename, or be removed."""
        # TODO: make this really return the filename, or remove it
        raise NotImplementedError

    @property
    @abstractmethod
    def path(self) -> Path:
        raise NotImplementedError

    @abstractmethod
    def is_longer_than(self, max_utf8_encoded_size: int) -> bool:
        """Return true if the length of the *utf-8 encoded* content is greater than
        `max_utf8_encoded_size`. When possible, implementations must try to answer this
        without reading all content.

        Raise `DecodeError` if the content cannot be decoded.
        """
        raise NotImplementedError

    @property
    @abstractmethod
    def content(self) -> str:
        """Return the decoded content of the scannable"""
        raise NotImplementedError

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} url={self.url} filemode={self.filemode}>"

    @staticmethod
    def _decode_bytes(
        raw_document: bytes, charset_match: Optional[CharsetMatch] = None
    ) -> Tuple[str, int]:
        """Low level helper function to decode bytes using `charset_match`. If
        `charset_match` is not provided, tries to determine it itself.

        Returns a tuple of (decoded_content, utf8_encoded_size).

        Raises DecodeError if the document cannot be decoded."""
        if charset_match is None:
            charset_match = charset_normalizer.from_bytes(raw_document).best()
        if charset_match is None:
            # This means we were not able to detect the encoding
            raise DecodeError

        # Special case for utf_8 + BOM: `bytes.decode()` does not skip the BOM, so do it
        # ourselves
        if charset_match.encoding == "utf_8" and raw_document.startswith(
            codecs.BOM_UTF8
        ):
            raw_document = raw_document[len(codecs.BOM_UTF8) :]
        content = raw_document.decode(charset_match.encoding, errors="replace")

        if charset_match.encoding in {"utf_8", "ascii"}:
            # The document is already in UTF-8, no need to encode it as UTF-8 to
            # determine UTF-8 encoded size.
            utf8_encoded_size = len(raw_document)
        else:
            utf8_encoded_size = len(content.encode(errors="replace"))

        return content, utf8_encoded_size

    @staticmethod
    def METHOD_NAME(
        fp: BinaryIO, max_utf8_encoded_size: int
    ) -> Tuple[bool, Optional[str], Optional[int]]:
        """Helper function to implement is_longer_than() for file-based Scannable
        classes.

        Returns a tuple of:
        - True if file is longer than `size`, False otherwise
        - The decoded content as a string if the file has been fully read, None otherwise
        - The utf8-encoded size if we know it, None otherwise

        Raises DecodeError if the file cannot be decoded.
        """
        # Get the byte size
        assert fp.seekable()
        byte_size = fp.seek(0, SEEK_END)

        if byte_size > max_utf8_encoded_size * UTF8_TO_WORSE_OTHER_ENCODING_RATIO:
            # Even if the file used the worst encoding (UTF-32), encoding the content of
            # this file as UTF-8 would produce a file longer than
            # `max_utf8_encoded_size`, so bail out
            return True, None, None

        # Determine the encoding
        fp.seek(0, SEEK_SET)
        charset_matches = charset_normalizer.from_fp(fp)
        charset_match = charset_matches.best()
        if charset_match is None:
            raise DecodeError

        if charset_match.encoding in {"utf_8", "ascii"}:
            # Shortcut: the content is already in UTF-8 (or ASCII, which is a subset of
            # utf-8), no need to decode anything
            return byte_size > max_utf8_encoded_size, None, byte_size

        # We can't know if the file is longer without reading its content, do it now
        fp.seek(0, SEEK_SET)
        content, utf8_encoded_size = Scannable._decode_bytes(fp.read(), charset_match)
        if utf8_encoded_size > max_utf8_encoded_size:
            return True, None, utf8_encoded_size
        else:
            # We read the whole file, keep it
            return False, content, utf8_encoded_size


class StringScannable(Scannable):
    """Implementation of Scannable for content already loaded in memory"""

    def __init__(self, url: str, content: str, filemode: Filemode = Filemode.FILE):
        super().__init__(filemode)
        self._url = url
        self._path: Optional[Path] = None
        self._content = content
        self._utf8_encoded_size = None

    @property
    def url(self) -> str:
        return self._url

    @property
    def filename(self) -> str:
        return str(self._url)

    @property
    def path(self) -> Path:
        if self._path is None:
            result = urllib.parse.urlparse(self._url)
            self._path = Path(result.path)
        return self._path

    def is_longer_than(self, max_utf8_encoded_size: int) -> bool:
        if self._utf8_encoded_size is None:
            self._utf8_encoded_size = len(self._content.encode(errors="replace"))
        return self._utf8_encoded_size > max_utf8_encoded_size

    @property
    def content(self) -> str:
        return self._content


class Files:
    """
    Files is a list of files. Useful for directory scanning.

    TODO: Rename to something like ScannableCollection: this class is no longer
    limited to holding File instances.
    """

    def __init__(self, files: List[Scannable]):
        self._files = files

    @property
    def files(self) -> List[Scannable]:
        """The list of files owned by this instance. The same filename can appear
        twice, in case of a merge commit."""
        return self._files

    @property
    def paths(self) -> List[Path]:
        """Convenience property to list paths in the same order as files"""
        return [x.path for x in self.files]

    def __repr__(self) -> str:
        return f"<Files files={self.files}>"

    def apply_filter(self, filter_func: Callable[[Scannable], bool]) -> "Files":
        return Files([file for file in self.files if filter_func(file)])

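# A small usage sketch (not part of the source above) of the in-memory
# scannable. The URL and content are made-up examples; it assumes the classes
# defined above (StringScannable, Files) are in scope.
from pathlib import Path

doc = StringScannable(url="custom:///tmp/example.py", content="print('hi')\n")
assert not doc.is_longer_than(1024)       # 12 UTF-8 bytes, well under the limit
assert doc.path == Path("/tmp/example.py")  # path is derived from the URL

files = Files([doc])
# keep only scannables whose UTF-8 encoded size fits in 1 KiB
small = files.apply_filter(lambda s: not s.is_longer_than(1024))
assert small.paths == [Path("/tmp/example.py")]
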
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""WikiHow Datasets."""

import csv
import os
import re

from etils import epath
import tensorflow_datasets.public_api as tfds

_CITATION = """
@misc{koupaee2018wikihow,
    title={WikiHow: A Large Scale Text Summarization Dataset},
    author={Mahnaz Koupaee and William Yang Wang},
    year={2018},
    eprint={1810.09305},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
"""

_DESCRIPTION = """
WikiHow is a new large-scale dataset using the online WikiHow
(http://www.wikihow.com/) knowledge base.

There are two features:
  - text: wikihow answers texts.
  - headline: bold lines as summary.

There are two separate versions:
  - all: consisting of the concatenation of all paragraphs as the articles and
    the bold lines as the reference summaries.
  - sep: consisting of each paragraph and its summary.

Download "wikihowAll.csv" and "wikihowSep.csv" from
https://github.com/mahnazkoupaee/WikiHow-Dataset and place them in manual folder
https://www.tensorflow.org/datasets/api_docs/python/tfds/download/DownloadConfig.
Train/validation/test splits are provided by the authors.
Preprocessing is applied to remove short articles
(abstract length < 0.75 article length) and clean up extra commas.
"""

_DOCUMENT = "text"
_SUMMARY = "headline"

_URLS = {
    "train": "https://raw.githubusercontent.com/mahnazkoupaee/WikiHow-Dataset/master/all_train.txt",
    "validation": "https://raw.githubusercontent.com/mahnazkoupaee/WikiHow-Dataset/master/all_val.txt",
    "test": "https://raw.githubusercontent.com/mahnazkoupaee/WikiHow-Dataset/master/all_test.txt",
}


class WikihowConfig(tfds.core.BuilderConfig):
  """BuilderConfig for Wikihow."""

  def __init__(self, *, filename=None, **kwargs):
    """BuilderConfig for Wikihow.

    Args:
      filename: filename of different configs for the dataset.
      **kwargs: keyword arguments forwarded to super.
    """
    # Version 1.1.0 remove empty document and summary strings.
    # Version 1.2.0 add train validation test split, add cleaning & filtering.
    super(WikihowConfig, self).__init__(
        version=tfds.core.Version("1.2.0"), **kwargs
    )
    self.filename = filename


class Wikihow(tfds.core.GeneratorBasedBuilder):
  """WikiHow: A Large Scale Text Summarization Dataset."""

  MANUAL_DOWNLOAD_INSTRUCTIONS = """\
  Links to files can be found on https://github.com/mahnazkoupaee/WikiHow-Dataset
  Please download both wikihowAll.csv and wikihowSep.csv.
  """

  BUILDER_CONFIGS = [
      WikihowConfig(
          name="all",
          filename="wikihowAll.csv",
          description=(
              "Use the concatenation of all paragraphs as the articles"
              " and the bold lines as the reference summaries"
          ),
      ),
      WikihowConfig(
          name="sep",
          filename="wikihowSep.csv",
          description="use each paragraph and its summary.",
      ),
  ]

  def _info(self):
    feature_names = [_DOCUMENT, _SUMMARY, "title"]
    if self.builder_config.name == "sep":
      feature_names.extend(["overview", "sectionLabel"])
    return tfds.core.DatasetInfo(
        builder=self,
        description=_DESCRIPTION,
        features=tfds.features.FeaturesDict(
            {k: tfds.features.Text() for k in feature_names}
        ),
        supervised_keys=(_DOCUMENT, _SUMMARY),
        homepage="https://github.com/mahnazkoupaee/WikiHow-Dataset",
        citation=_CITATION,
    )

  def _split_generators(self, dl_manager):
    """Returns SplitGenerators."""
    dl_path = dl_manager.download(_URLS)
    titles = {k: set() for k in dl_path}
    for k, path in dl_path.items():
      with epath.Path(path).open() as f:
        for line in f:
          titles[k].add(line.strip())
    return [
        tfds.core.SplitGenerator(
            name=tfds.Split.TRAIN,
            gen_kwargs={
                "path": os.path.join(
                    dl_manager.manual_dir, self.builder_config.filename
                ),
                "title_set": titles["train"],
            },
        ),
        tfds.core.SplitGenerator(
            name=tfds.Split.VALIDATION,
            gen_kwargs={
                "path": os.path.join(
                    dl_manager.manual_dir, self.builder_config.filename
                ),
                "title_set": titles["validation"],
            },
        ),
        tfds.core.SplitGenerator(
            name=tfds.Split.TEST,
            gen_kwargs={
                "path": os.path.join(
                    dl_manager.manual_dir, self.builder_config.filename
                ),
                "title_set": titles["test"],
            },
        ),
    ]

  def _generate_examples(self, path=None, title_set=None):
    """Yields examples."""
    with epath.Path(path).open() as f:
      reader = csv.reader(f)
      headers = next(reader)
      if self.builder_config.name == "all" and headers != [
          "headline",
          "title",
          "text",
      ]:
        raise ValueError("Mismatched header in WikiAll.txt")
      if self.builder_config.name == "sep" and headers != [
          "overview",
          "headline",
          "text",
          "sectionLabel",
          "title",
      ]:
        raise ValueError("Mismatched header in WikiSep.txt")
      key2id = {key: i for i, key in enumerate(headers)}
      for i, line in enumerate(reader):
        # skip empty line or insufficient line.
        if len(line) == len(key2id):
          summary = line[key2id[_SUMMARY]].strip()
          document = line[key2id[_DOCUMENT]].strip()
          summary, document = METHOD_NAME(summary, document)
          if summary and document:
            if line[key2id["title"]].strip().replace(" ", "") in title_set:
              d = {
                  k: line[v].strip()
                  for k, v in key2id.items()
                  if k not in [_SUMMARY, _DOCUMENT]
              }
              d[_DOCUMENT] = document
              d[_SUMMARY] = summary
              yield i, d


# This function follows the data processing of the original paper at
# https://github.com/mahnazkoupaee/WikiHow-Dataset/blob/master/process.py
def METHOD_NAME(abstract, article):
  """Remove short article and clean up commas in abstract and article."""
  # a threshold is used to remove short articles with long summaries
  # as well as articles with no summary
  if len(abstract) < (0.75 * len(article)):
    # remove extra commas in abstracts
    abstract = abstract.replace(".,", ".")
    # remove extra commas in articles
    article = re.sub(r"[.]+[\n]+[,]", ".\n", article)
    return abstract, article
  else:
    return "", ""

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkarms.endpoint import endpoint_data


class CreateOrUpdateAlertRuleRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'ARMS', '2019-08-08', 'CreateOrUpdateAlertRule','arms')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_AlertGroup(self):  # Long
        return self.get_body_params().get('AlertGroup')

    def set_AlertGroup(self, AlertGroup):  # Long
        self.add_body_params('AlertGroup', AlertGroup)

    def get_AlertName(self):  # String
        return self.get_body_params().get('AlertName')

    def set_AlertName(self, AlertName):  # String
        self.add_body_params('AlertName', AlertName)

    def get_AlertStatus(self):  # String
        return self.get_body_params().get('AlertStatus')

    def set_AlertStatus(self, AlertStatus):  # String
        self.add_body_params('AlertStatus', AlertStatus)

    def get_Annotations(self):  # String
        return self.get_body_params().get('Annotations')

    def set_Annotations(self, Annotations):  # String
        self.add_body_params('Annotations', Annotations)

    def get_Duration(self):  # Long
        return self.get_body_params().get('Duration')

    def set_Duration(self, Duration):  # Long
        self.add_body_params('Duration', Duration)

    def get_MetricsKey(self):  # String
        return self.get_body_params().get('MetricsKey')

    def set_MetricsKey(self, MetricsKey):  # String
        self.add_body_params('MetricsKey', MetricsKey)

    def get_AlertRuleContent(self):  # String
        return self.get_body_params().get('AlertRuleContent')

    def set_AlertRuleContent(self, AlertRuleContent):  # String
        self.add_body_params('AlertRuleContent', AlertRuleContent)

    def get_PromQL(self):  # String
        return self.get_body_params().get('PromQL')

    def set_PromQL(self, PromQL):  # String
        self.add_body_params('PromQL', PromQL)

    def get_Level(self):  # String
        return self.get_body_params().get('Level')

    def set_Level(self, Level):  # String
        self.add_body_params('Level', Level)

    def get_AutoAddNewApplication(self):  # Boolean
        return self.get_body_params().get('AutoAddNewApplication')

    def set_AutoAddNewApplication(self, AutoAddNewApplication):  # Boolean
        self.add_body_params('AutoAddNewApplication', AutoAddNewApplication)

    def get_Filters(self):  # String
        return self.get_body_params().get('Filters')

    def set_Filters(self, Filters):  # String
        self.add_body_params('Filters', Filters)

    def get_ClusterId(self):  # String
        return self.get_body_params().get('ClusterId')

    def set_ClusterId(self, ClusterId):  # String
        self.add_body_params('ClusterId', ClusterId)

    def get_Message(self):  # String
        return self.get_body_params().get('Message')

    def set_Message(self, Message):  # String
        self.add_body_params('Message', Message)

    def get_NotifyStrategy(self):  # String
        return self.get_body_params().get('NotifyStrategy')

    def set_NotifyStrategy(self, NotifyStrategy):  # String
        self.add_body_params('NotifyStrategy', NotifyStrategy)

    def get_Labels(self):  # String
        return self.get_body_params().get('Labels')

    def set_Labels(self, Labels):  # String
        self.add_body_params('Labels', Labels)

    def get_Tagss(self):  # RepeatList
        return self.get_body_params().get('Tags')

    def set_Tagss(self, Tags):  # RepeatList
        for depth1 in range(len(Tags)):
            if Tags[depth1].get('Value') is not None:
                self.add_body_params('Tags.' + str(depth1 + 1) + '.Value', Tags[depth1].get('Value'))
            if Tags[depth1].get('Key') is not None:
                self.add_body_params('Tags.' + str(depth1 + 1) + '.Key', Tags[depth1].get('Key'))

    def get_AlertType(self):  # String
        return self.get_body_params().get('AlertType')

    def set_AlertType(self, AlertType):  # String
        self.add_body_params('AlertType', AlertType)

    def METHOD_NAME(self):  # String
        return self.get_body_params().get('AlertCheckType')

    def set_AlertCheckType(self, AlertCheckType):  # String
        self.add_body_params('AlertCheckType', AlertCheckType)

    def get_MetricsType(self):  # String
        return self.get_body_params().get('MetricsType')

    def set_MetricsType(self, MetricsType):  # String
        self.add_body_params('MetricsType', MetricsType)

    def get_AlertId(self):  # Long
        return self.get_body_params().get('AlertId')

    def set_AlertId(self, AlertId):  # Long
        self.add_body_params('AlertId', AlertId)

    def get_Pids(self):  # String
        return self.get_body_params().get('Pids')

    def set_Pids(self, Pids):  # String
        self.add_body_params('Pids', Pids)

#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Unit test for Google Test's break-on-failure mode.

A user can ask Google Test to seg-fault when an assertion fails, using
either the GTEST_BREAK_ON_FAILURE environment variable or the
--gtest_break_on_failure flag.  This script tests such functionality
by invoking googletest-break-on-failure-unittest_ (a program written with
Google Test) with different environments and command line flags.
"""

import os
from googletest.test import gtest_test_utils

# Constants.

IS_WINDOWS = os.name == 'nt'

# The environment variable for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'

# The command line flag for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'

# The environment variable for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'

# The environment variable for enabling/disabling the catch-exceptions mode.
CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'

# Path to the googletest-break-on-failure-unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
    'googletest-break-on-failure-unittest_')

environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar

# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely.  Therefore they are incompatible with
# the premature-exit-file protocol by design.  Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)


def Run(command):
  """Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""

  p = gtest_test_utils.Subprocess(command, env=environ)
  if p.terminated_by_signal:
    return 1
  else:
    return 0


# The tests.


class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
  """Tests using the GTEST_BREAK_ON_FAILURE environment variable or
  the --gtest_break_on_failure flag to turn assertion failures into
  segmentation faults.
  """

  def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
    """Runs googletest-break-on-failure-unittest_ and verifies that it does
    (or does not) have a seg-fault.

    Args:
      env_var_value:    value of the GTEST_BREAK_ON_FAILURE environment
                        variable; None if the variable should be unset.
      flag_value:       value of the --gtest_break_on_failure flag;
                        None if the flag should not be present.
      expect_seg_fault: 1 if the program is expected to generate a seg-fault;
                        0 otherwise.
    """

    SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)

    if env_var_value is None:
      env_var_value_msg = ' is not set'
    else:
      env_var_value_msg = '=' + env_var_value

    if flag_value is None:
      flag = ''
    elif flag_value == '0':
      flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
    else:
      flag = '--%s' % BREAK_ON_FAILURE_FLAG

    command = [EXE_PATH]
    if flag:
      command.append(flag)

    if expect_seg_fault:
      should_or_not = 'should'
    else:
      should_or_not = 'should not'

    has_seg_fault = Run(command)

    SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)

    msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
           (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
            should_or_not))
    self.assert_(has_seg_fault == expect_seg_fault, msg)

  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""

    self.RunAndVerify(env_var_value=None, flag_value=None, expect_seg_fault=0)

  def testEnvVar(self):
    """Tests using the GTEST_BREAK_ON_FAILURE environment variable."""

    self.RunAndVerify(env_var_value='0', flag_value=None, expect_seg_fault=0)
    self.RunAndVerify(env_var_value='1', flag_value=None, expect_seg_fault=1)

  def testFlag(self):
    """Tests using the --gtest_break_on_failure flag."""

    self.RunAndVerify(env_var_value=None, flag_value='0', expect_seg_fault=0)
    self.RunAndVerify(env_var_value=None, flag_value='1', expect_seg_fault=1)

  def testFlagOverridesEnvVar(self):
    """Tests that the flag overrides the environment variable."""

    self.RunAndVerify(env_var_value='0', flag_value='0', expect_seg_fault=0)
    self.RunAndVerify(env_var_value='0', flag_value='1', expect_seg_fault=1)
    self.RunAndVerify(env_var_value='1', flag_value='0', expect_seg_fault=0)
    self.RunAndVerify(env_var_value='1', flag_value='1', expect_seg_fault=1)

  def METHOD_NAME(self):
    """Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""

    SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
    try:
      self.RunAndVerify(env_var_value=None, flag_value='1', expect_seg_fault=1)
    finally:
      SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)

  if IS_WINDOWS:

    def testCatchExceptionsDoesNotInterfere(self):
      """Tests that gtest_catch_exceptions doesn't interfere."""

      SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
      try:
        self.RunAndVerify(env_var_value='1', flag_value='1',
                          expect_seg_fault=1)
      finally:
        SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)


if __name__ == '__main__':
  gtest_test_utils.Main()

from __future__ import print_function

from acq4.devices.Device import Device, DeviceTask, TaskGui
from acq4.util import Qt
from six.moves import range


class Screen(Device):
    """
    Device used for screen output.

    Currently, this is only used to blank the screen temporarily to avoid
    contaminating sensitive imaging operations. In the future, this device may
    be extended to provide visual stimulation (perhaps via psychopy)
    """
    sigBlankScreen = Qt.Signal(object, object)  # bool blank/unblank, QWaitCondition

    def __init__(self, dm, config, name):
        Device.__init__(self, dm, config, name)
        dm.declareInterface(name, ['screen'], self)
        self.blanker = ScreenBlanker()
        self.sigBlankScreen.connect(self.blankRequested, Qt.Qt.QueuedConnection)

    def taskInterface(self, taskRunner):
        return ScreenTaskGui(self, taskRunner)

    def createTask(self, cmd, parentTask):
        return ScreenTask(self, cmd, parentTask)

    def METHOD_NAME(self, blank=True, timeout=10.):
        isGuiThread = Qt.QThread.currentThread() == Qt.QCoreApplication.instance().thread()
        if isGuiThread:
            if blank:
                self.blanker.blank()
            else:
                self.blanker.unBlank()
        else:
            mutex = Qt.QMutex()
            mutex.lock()
            waitCond = Qt.QWaitCondition()
            self.sigBlankScreen.emit(blank, waitCond)
            if not waitCond.wait(mutex, int(timeout*1000)):
                raise Exception("Screen blanker threaded request timed out.")

    def unBlankScreen(self):
        self.METHOD_NAME(False)

    def blankRequested(self, blank, waitCond):
        try:
            if blank:
                self.METHOD_NAME()
            else:
                self.unBlankScreen()
        finally:
            waitCond.wakeAll()


class Black(Qt.QWidget):
    """ make a black rectangle to fill screen when "blanking" """
    def paintEvent(self, event):
        p = Qt.QPainter(self)
        brush = Qt.QBrush(Qt.QColor(0,0,0))
        p.fillRect(self.rect(), brush)
        p.end()


class ScreenBlanker:
    """
    Perform the blanking on ALL screens that we can detect.
    This is so that extraneous light does not leak into the detector
    during acquisition.
    """
    def __init__(self):
        self.widgets = []

    def blank(self):
        d = Qt.QApplication.desktop()
        for i in range(d.screenCount()):  # look for all screens
            w = Black()
            self.widgets.append(w)  # make a black widget
            sg = d.screenGeometry(i)  # get the screen size
            w.move(sg.x(), sg.y())  # put the widget there
            w.showFullScreen()  # duh
        Qt.QApplication.processEvents()  # make it so

    def __exit__(self, *args):
        pass
        #for w in self.widgets:
            #w.hide()  # just take them away
        #self.widgets = []

    def unBlank(self):
        for w in self.widgets:
            w.hide()  # just take them away
        self.widgets = []


class ScreenTask(DeviceTask):

    def __init__(self, dev, cmd, parentTask):
        DeviceTask.__init__(self, dev, cmd, parentTask)
        self.cmd = cmd

    def configure(self):
        pass

    def start(self):
        ## possibly nothing required here, DAQ will start recording.
        if self.cmd['blank']:
            self.dev.METHOD_NAME()

    def stop(self, abort=False):
        self.dev.unBlankScreen()


class ScreenTaskGui(TaskGui):

    def __init__(self, dev, taskRunner):
        TaskGui.__init__(self, dev, taskRunner)
        self.layout = Qt.QGridLayout()
        self.setLayout(self.layout)
        self.blankCheck = Qt.QCheckBox("Blank Screen")
        self.layout.addWidget(self.blankCheck)

    def saveState(self):
        return {'blank': self.blankCheck.isChecked()}

    def restoreState(self, state):
        self.blankCheck.setChecked(state['blank'])

    def listSequence(self):
        return []

    def generateTask(self, params=None):
        return self.saveState()

"""Annif backend using the fastText classifier""" from __future__ import annotations import collections import os.path from typing import TYPE_CHECKING, Any import fasttext import annif.util from annif.exception import NotInitializedException, NotSupportedException from annif.suggestion import SubjectSuggestion from . import backend, mixins if TYPE_CHECKING: from fasttext.FastText import _FastText from numpy import ndarray from annif.corpus.document import DocumentCorpus class FastTextBackend(mixins.ChunkingBackend, backend.AnnifBackend): """fastText backend for Annif""" name = "fasttext" FASTTEXT_PARAMS = { "lr": float, "lrUpdateRate": int, "dim": int, "ws": int, "epoch": int, "minCount": int, "neg": int, "wordNgrams": int, "loss": str, "bucket": int, "minn": int, "maxn": int, "thread": int, "t": float, "pretrainedVectors": str, } DEFAULT_PARAMETERS = { "dim": 100, "lr": 0.25, "epoch": 5, "loss": "hs", } MODEL_FILE = "fasttext-model" TRAIN_FILE = "fasttext-train.txt" # defaults for uninitialized instances _model = None def default_params(self) -> dict[str, Any]: params = backend.AnnifBackend.DEFAULT_PARAMETERS.copy() params.update(mixins.ChunkingBackend.DEFAULT_PARAMETERS) params.update(self.DEFAULT_PARAMETERS) return params @staticmethod def _load_model(path: str) -> _FastText: # monkey patch fasttext.FastText.eprint to avoid spurious warning # see https://github.com/facebookresearch/fastText/issues/1067 orig_eprint = fasttext.FastText.eprint fasttext.FastText.eprint = lambda x: None model = fasttext.load_model(path) # restore the original eprint fasttext.FastText.eprint = orig_eprint return model def initialize(self, parallel: bool = False) -> None: if self._model is None: path = os.path.join(self.datadir, self.MODEL_FILE) self.debug("loading fastText model from {}".format(path)) if os.path.exists(path): self._model = self._load_model(path) self.debug("loaded model {}".format(str(self._model))) self.debug("dim: {}".format(self._model.get_dimension())) else: raise NotInitializedException( "model {} not found".format(path), backend_id=self.backend_id ) @staticmethod def _id_to_label(subject_id: int) -> str: return "__label__{:d}".format(subject_id) def _label_to_subject_id(self, label: str) -> int: labelnum = label.replace("__label__", "") return int(labelnum) def _write_train_file(self, corpus: DocumentCorpus, filename: str) -> None: with open(filename, "w", encoding="utf-8") as trainfile: for doc in corpus.documents: text = self.METHOD_NAME(doc.text) if text == "": continue labels = [self._id_to_label(sid) for sid in doc.subject_set] if labels: print(" ".join(labels), text, file=trainfile) else: self.warning(f'no labels for document "{doc.text}"') def METHOD_NAME(self, text: str) -> str: return " ".join(self.project.analyzer.tokenize_words(text)) def _create_train_file( self, corpus: DocumentCorpus, ) -> None: self.info("creating fastText training file") annif.util.atomic_save( corpus, self.datadir, self.TRAIN_FILE, method=self._write_train_file ) def _create_model(self, params: dict[str, Any], jobs: int) -> None: self.info("creating fastText model") trainpath = os.path.join(self.datadir, self.TRAIN_FILE) modelpath = os.path.join(self.datadir, self.MODEL_FILE) params = { param: self.FASTTEXT_PARAMS[param](val) for param, val in params.items() if param in self.FASTTEXT_PARAMS } if jobs != 0: # jobs set by user to non-default value params["thread"] = jobs self.debug("Model parameters: {}".format(params)) self._model = fasttext.train_supervised(trainpath, **params) 
self._model.save_model(modelpath) def _train( self, corpus: DocumentCorpus, params: dict[str, Any], jobs: int = 0, ) -> None: if corpus != "cached": if corpus.is_empty(): raise NotSupportedException( "training backend {} with no documents".format(self.backend_id) ) self._create_train_file(corpus) else: self.info("Reusing cached training data from previous run.") self._create_model(params, jobs) def _predict_chunks( self, chunktexts: list[str], limit: int ) -> tuple[list[list[str]], list[ndarray]]: return self._model.predict( list( filter( None, [self.METHOD_NAME(chunktext) for chunktext in chunktexts] ) ), limit, ) def _suggest_chunks( self, chunktexts: list[str], params: dict[str, Any] ) -> list[SubjectSuggestion]: limit = int(params["limit"]) chunklabels, chunkscores = self._predict_chunks(chunktexts, limit) label_scores = collections.defaultdict(float) for labels, scores in zip(chunklabels, chunkscores): for label, score in zip(labels, scores): label_scores[label] += score best_labels = sorted( [(score, label) for label, score in label_scores.items()], reverse=True ) results = [] for score, label in best_labels[:limit]: results.append( SubjectSuggestion( subject_id=self._label_to_subject_id(label), score=score / len(chunktexts), ) ) return results
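# Illustrative sketch (not part of the backend above): the training file that
# _write_train_file produces has one document per line, with subject IDs
# encoded as fastText labels, e.g. "__label__12 __label__34 tokenized text".
# Training and predicting on such a file with the fasttext library directly
# would look roughly like this; "train.txt" is a hypothetical path and the
# hyperparameters simply mirror DEFAULT_PARAMETERS above.
import fasttext

model = fasttext.train_supervised("train.txt", dim=100, lr=0.25, epoch=5, loss="hs")
labels, scores = model.predict("some tokenized document text", k=10)
# labels is a tuple like ("__label__42", ...); stripping the "__label__"
# prefix recovers the numeric subject ID, as _label_to_subject_id does.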
import pytest
from collections import namedtuple

from django.core.files.storage import default_storage as storage
from django.core.files.uploadedfile import SimpleUploadedFile

from pulpcore.plugin.exceptions import (
    UnsupportedDigestValidationError,
    MissingDigestValidationError,
)
from pulpcore.plugin.models import (
    Artifact,
    Content,
    ContentArtifact,
    PulpTemporaryFile,
    Remote,
    RemoteArtifact,
)


@pytest.mark.django_db
def test_create_read_delete_content(tmp_path):
    artifact_path = tmp_path / "artifact-tmp"
    artifact_path.write_text("Temp Artifact File")
    artifact = Artifact.init_and_validate(str(artifact_path))
    artifact.save()

    content = Content.objects.create()
    artifact_file = storage.open(artifact.file.name)
    content_artifact = ContentArtifact.objects.create(
        artifact=artifact, content=content, relative_path=artifact_file.name
    )
    assert Content.objects.filter(pk=content.pk).exists()
    assert (
        ContentArtifact.objects.get(pk=content_artifact.pk).content.pk
        == Content.objects.get(pk=content.pk).pk
    )
    Content.objects.filter(pk=content.pk).delete()
    assert not Content.objects.filter(pk=content.pk).exists()


@pytest.mark.django_db
def test_storage_location(tmp_path, settings):
    if settings.DEFAULT_FILE_STORAGE != "pulpcore.app.models.storage.FileSystem":
        pytest.skip("Skipping test for nonlocal storage.")

    tf = tmp_path / "ab"
    tf.write_bytes(b"temp file test")
    temp_file = PulpTemporaryFile(file=str(tf))
    temp_file.save()

    assert temp_file.file.name.startswith("tmp/files/")
    name = temp_file.file.file.name
    assert name.startswith("/var/lib/pulp/media/tmp/files"), name


@pytest.mark.django_db
def test_read_temp_file(tmp_path):
    tf = tmp_path / "ab"
    tf.write_bytes(b"temp file test")
    temp_file = PulpTemporaryFile(file=str(tf))
    temp_file.save()

    assert b"temp file test" in temp_file.file.read()


@pytest.mark.django_db
def test_artifact_forbidden_digest(monkeypatch):
    monkeypatch.setattr(Artifact, "FORBIDDEN_DIGESTS", {"md5"})
    monkeypatch.setattr(Artifact, "DIGEST_FIELDS", {"sha512", "sha384", "sha224", "sha1", "sha256"})
    with pytest.raises(UnsupportedDigestValidationError):
        a = Artifact(
            file=SimpleUploadedFile("test_filename", b"test content"),
            sha512="asdf",
            sha384="asdf",
            sha224="asdf",
            sha1="asdf",
            sha256="asdf",
            size=1024,
        )
        a.md5 = "asdf"
        a.save()


@pytest.mark.django_db
def test_artifact_forgotten_digest(monkeypatch):
    monkeypatch.setattr(Artifact, "FORBIDDEN_DIGESTS", {"md5"})
    monkeypatch.setattr(Artifact, "DIGEST_FIELDS", {"sha512", "sha384", "sha224", "sha1", "sha256"})
    with pytest.raises(MissingDigestValidationError):
        a = Artifact(
            file=SimpleUploadedFile("test_filename", b"test content"),
            sha512="asdf",
            sha384="asdf",
            sha224="asdf",
            sha1="asdf",
            sha256="asdf",
            size=1024,
        )
        a.sha224 = None
        a.save()


@pytest.fixture
def remote_artifact_setup(monkeypatch, db):
    monkeypatch.setattr(Artifact, "FORBIDDEN_DIGESTS", {"md5", "sha1"})
    monkeypatch.setattr(Artifact, "DIGEST_FIELDS", {"sha512", "sha384", "sha224", "sha256"})
    content = Content.objects.create()
    content_artifact = ContentArtifact.objects.create(
        artifact=None, content=content, relative_path="ca"
    )
    remote = Remote.objects.create(url="http://example.org/")
    return namedtuple("RemoteArtifactSetup", "content content_artifact remote")(
        content, content_artifact, remote
    )


def test_remoteartifact_with_no_checksums(remote_artifact_setup):
    ra = RemoteArtifact(
        url="http://example.org/file",
        size=1024,
        md5=None,
        sha1=None,
        sha224=None,
        sha256="",
        sha384=None,
        sha512=None,
        content_artifact=remote_artifact_setup.content_artifact,
        remote=remote_artifact_setup.remote,
    )
    ra.validate_checksums()


def test_remoteartifact_with_allowed_checksums(remote_artifact_setup):
    ra = RemoteArtifact(
        url="http://example.org/file",
        size=1024,
        md5="",
        sha1=None,
        sha224=None,
        sha256="sha256checksum",
        sha384=None,
        sha512=None,
        content_artifact=remote_artifact_setup.content_artifact,
        remote=remote_artifact_setup.remote,
    )
    ra.validate_checksums()


def test_remoteartifact_with_allowed_and_forbidden_checksums(remote_artifact_setup):
    ra = RemoteArtifact(
        url="http://example.org/file",
        size=1024,
        md5="",
        sha1="sha1checksum",
        sha224=None,
        sha256="sha256checksum",
        sha384=None,
        sha512=None,
        content_artifact=remote_artifact_setup.content_artifact,
        remote=remote_artifact_setup.remote,
    )
    ra.validate_checksums()


def METHOD_NAME(remote_artifact_setup):
    with pytest.raises(UnsupportedDigestValidationError):
        ra = RemoteArtifact(
            url="http://example.org/file",
            size=1024,
            md5="md5checksum",
            sha1=None,
            sha224=None,
            sha256="",
            sha384=None,
            sha512=None,
            content_artifact=remote_artifact_setup.content_artifact,
            remote=remote_artifact_setup.remote,
        )
        ra.validate_checksums()
# Copyright 2020 Camptocamp (http://www.camptocamp.com).
# @author Simone Orsi <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from .common import TestMultiUserPartnerDomainCommon


class TestMultiUserServicePartnerDomain(TestMultiUserPartnerDomainCommon):
    """Test partner domains for services"""

    # The company can always see everything
    def test_user_company__record_id(self):
        self._test_partner_records(
            self.company,
            addresses=self.all_partners + self.all_addresses,
            orders=self.all_orders,
            invoices=self.all_invoices,
        )

    def test_user_company__parent_id(self):
        self.backend.multi_user_records_policy = "parent_id"
        self._test_partner_records(
            self.company,
            addresses=self.all_partners + self.all_addresses,
            orders=self.all_orders,
            invoices=self.all_invoices,
        )

    def test_user_company__main_partner_id(self):
        self.backend.multi_user_records_policy = "main_partner_id"
        self._test_partner_records(
            self.company,
            addresses=self.all_partners + self.all_addresses,
            orders=self.all_orders,
            invoices=self.all_invoices,
        )

    def test_user_direct_child_of_company__record_id(self):
        """Direct child sees only its own records."""
        # For this test, consider all addresses as private
        self.all_public_addresses.write({"invader_address_share_policy": "private"})
        self._test_user_direct_child_of_company__record_id(self.user)
        self._test_user_direct_child_of_company__record_id(self.user2)
        self._test_user_direct_child_of_company__record_id(self.user3)

    def test_user_direct_child_of_company__parent_id(self):
        """Direct child sees only its own records and the ones from direct parent."""
        self.backend.multi_user_records_policy = "parent_id"
        # Case 1: User 1 sees only its own records, the ones from its direct
        # parent, and the publicly shared ones from their siblings
        partner = self.user
        expected_addresses = (
            partner
            | self.company
            | self._get_addresses(self.company, policy="public")
            | self._get_addresses(self.user, policy="private")
            | self.all_public_addresses
        )
        self._test_user_direct_child_of_company__parent_id(
            partner, self.company, expected_addresses=expected_addresses
        )
        # Case 2: User 2 sees only its own records, the ones from its direct
        # parent, and the publicly shared ones from their siblings
        partner = self.user2
        expected_addresses = (
            partner
            | self.company
            | self._get_addresses(self.company, policy="public")
            | self._get_addresses(self.user2, policy="private")
            | self.all_public_addresses
        )
        self._test_user_direct_child_of_company__parent_id(
            partner, self.company, expected_addresses=expected_addresses
        )
        # Case 3: User 3 sees only its own records, the ones from its direct
        # parent, and the publicly shared ones from their siblings
        partner = self.user3
        expected_addresses = (
            partner
            | self.user2
            | self._get_addresses(self.user3)
            | self._get_addresses(self.user2, policy="public")
        )
        self._test_user_direct_child_of_company__parent_id(
            partner,
            self.user2,
            expected_addresses=expected_addresses,
        )
        # Case 4: main partner is the parent in this case,
        # but if we set the company, then they can see records from the company as well
        self.user3_binding.main_partner_id = self.company
        expected_addresses |= (
            self.company
            | self._get_addresses(self.company, policy="public")
            | self.all_public_addresses
        )
        self._test_user_direct_child_of_company__parent_id(
            partner,
            self.company + self.user2,
            expected_addresses=expected_addresses,
        )

    def METHOD_NAME(self):
        """Direct child sees only its own records and the ones from main partner."""
        self.backend.multi_user_records_policy = "main_partner_id"
        # Case 1: User 1 sees only its own records, the ones from its main
        # partner, and the publicly shared ones from their siblings
        partner = self.user
        expected_addresses = (
            partner
            | self.company
            | self._get_addresses(self.company, policy="public")
            | self._get_addresses(self.user, policy="private")
            | self.all_public_addresses
        )
        self._test_user_direct_child_of_company__parent_id(
            partner, self.company, expected_addresses=expected_addresses
        )
        # Case 2: User 2 sees only its own records, the ones from its main
        # partner, and the publicly shared ones from their siblings
        partner = self.user2
        expected_addresses = (
            partner
            | self.company
            | self._get_addresses(self.company, policy="public")
            | self._get_addresses(self.user2, policy="private")
            | self.all_public_addresses
        )
        self._test_user_direct_child_of_company__parent_id(
            partner, self.company, expected_addresses=expected_addresses
        )
        # Case 3: Change User 2's main partner to user 1
        self.user2_binding.main_partner_id = self.user
        expected_addresses = (
            partner
            | self._get_addresses(self.user2)
            | self._get_addresses(self.user, policy="public")
            | self.user
        )
        self._test_user_direct_child_of_company__parent_id(
            partner,
            self.user,
            expected_addresses=expected_addresses,
        )
        self.assertEqual(self.user3_binding.main_partner_id, self.user2)
        # Case 4: User 3 sees only its own records, the ones from its main
        # partner, and the publicly shared ones from their siblings
        partner = self.user3
        expected_addresses = (
            partner
            | self._get_addresses(self.user3)
            | self._get_addresses(self.user2, policy="public")
            | self.user2
        )
        self._test_user_direct_child_of_company__parent_id(
            partner,
            self.user2,
            expected_addresses=expected_addresses,
        )
        # Case 5: Change User 3's main partner to company
        self.user3_binding.main_partner_id = self.company
        expected_addresses = (
            partner
            | self.company
            | self._get_addresses(self.company, policy="public")
            | self._get_addresses(self.user3, policy="private")
            | self.all_public_addresses
        )
        self._test_user_direct_child_of_company__parent_id(
            partner, self.company, expected_addresses=expected_addresses
        )
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data


class DescribeSecurityGroupsRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'DescribeSecurityGroups', 'ecs')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_FuzzyQuery(self):  # Boolean
        return self.get_query_params().get('FuzzyQuery')

    def set_FuzzyQuery(self, FuzzyQuery):  # Boolean
        self.add_query_param('FuzzyQuery', FuzzyQuery)

    def get_SecurityGroupId(self):  # String
        return self.get_query_params().get('SecurityGroupId')

    def set_SecurityGroupId(self, SecurityGroupId):  # String
        self.add_query_param('SecurityGroupId', SecurityGroupId)

    def get_IsQueryEcsCount(self):  # Boolean
        return self.get_query_params().get('IsQueryEcsCount')

    def set_IsQueryEcsCount(self, IsQueryEcsCount):  # Boolean
        self.add_query_param('IsQueryEcsCount', IsQueryEcsCount)

    def get_NetworkType(self):  # String
        return self.get_query_params().get('NetworkType')

    def set_NetworkType(self, NetworkType):  # String
        self.add_query_param('NetworkType', NetworkType)

    def get_SecurityGroupName(self):  # String
        return self.get_query_params().get('SecurityGroupName')

    def set_SecurityGroupName(self, SecurityGroupName):  # String
        self.add_query_param('SecurityGroupName', SecurityGroupName)

    def get_PageNumber(self):  # Integer
        return self.get_query_params().get('PageNumber')

    def set_PageNumber(self, PageNumber):  # Integer
        self.add_query_param('PageNumber', PageNumber)

    def get_ResourceGroupId(self):  # String
        return self.get_query_params().get('ResourceGroupId')

    def set_ResourceGroupId(self, ResourceGroupId):  # String
        self.add_query_param('ResourceGroupId', ResourceGroupId)

    def get_NextToken(self):  # String
        return self.get_query_params().get('NextToken')

    def METHOD_NAME(self, NextToken):  # String
        self.add_query_param('NextToken', NextToken)

    def get_PageSize(self):  # Integer
        return self.get_query_params().get('PageSize')

    def set_PageSize(self, PageSize):  # Integer
        self.add_query_param('PageSize', PageSize)

    def get_Tags(self):  # RepeatList
        return self.get_query_params().get('Tag')

    def set_Tags(self, Tag):  # RepeatList
        for depth1 in range(len(Tag)):
            if Tag[depth1].get('Value') is not None:
                self.add_query_param('Tag.' + str(depth1 + 1) + '.Value', Tag[depth1].get('Value'))
            if Tag[depth1].get('Key') is not None:
                self.add_query_param('Tag.' + str(depth1 + 1) + '.Key', Tag[depth1].get('Key'))

    def get_DryRun(self):  # Boolean
        return self.get_query_params().get('DryRun')

    def set_DryRun(self, DryRun):  # Boolean
        self.add_query_param('DryRun', DryRun)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_SecurityGroupIds(self):  # String
        return self.get_query_params().get('SecurityGroupIds')

    def set_SecurityGroupIds(self, SecurityGroupIds):  # String
        self.add_query_param('SecurityGroupIds', SecurityGroupIds)

    def get_SecurityGroupType(self):  # String
        return self.get_query_params().get('SecurityGroupType')

    def set_SecurityGroupType(self, SecurityGroupType):  # String
        self.add_query_param('SecurityGroupType', SecurityGroupType)

    def get_VpcId(self):  # String
        return self.get_query_params().get('VpcId')

    def set_VpcId(self, VpcId):  # String
        self.add_query_param('VpcId', VpcId)

    def get_MaxResults(self):  # Integer
        return self.get_query_params().get('MaxResults')

    def set_MaxResults(self, MaxResults):  # Integer
        self.add_query_param('MaxResults', MaxResults)
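# Illustrative sketch (not from the SDK source above): issuing this request
# through the aliyunsdkcore client. The credentials and region here are
# placeholders, not values from the original file.
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
request = DescribeSecurityGroupsRequest()
request.set_PageSize(50)  # optional paging parameter
response = client.do_action_with_exception(request)  # raw JSON bytes
print(response)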
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""The Multi-Genre NLI Corpus."""

import os

from tensorflow_datasets.core.utils.lazy_imports_utils import tensorflow as tf
import tensorflow_datasets.public_api as tfds

_CITATION = """\
@InProceedings{N18-1101,
  author    = "Williams, Adina and Nangia, Nikita and Bowman, Samuel",
  title     = "A Broad-Coverage Challenge Corpus for Sentence Understanding through Inference",
  booktitle = "Proceedings of the 2018 Conference of the North American Chapter of the
               Association for Computational Linguistics: Human Language Technologies,
               Volume 1 (Long Papers)",
  year      = "2018",
  publisher = "Association for Computational Linguistics",
  pages     = "1112--1122",
  location  = "New Orleans, Louisiana",
  url       = "http://aclweb.org/anthology/N18-1101"
}
"""

_DESCRIPTION = """\
The Multi-Genre Natural Language Inference (MultiNLI) corpus is a
crowd-sourced collection of 433k sentence pairs annotated with textual
entailment information. The corpus is modeled on the SNLI corpus, but differs
in that it covers a range of genres of spoken and written text, and supports a
distinctive cross-genre generalization evaluation. The corpus served as the
basis for the shared task of the RepEval 2017 Workshop at EMNLP in Copenhagen.
"""


class MultiNLI(tfds.core.GeneratorBasedBuilder):
  """MultiNLI: The Multi-Genre Natural Language Inference corpus. Version 1.1."""

  VERSION = tfds.core.Version("1.1.0")

  def METHOD_NAME(self):
    return tfds.core.DatasetInfo(
        builder=self,
        description=_DESCRIPTION,
        features=tfds.features.FeaturesDict({
            "premise": tfds.features.Text(),
            "hypothesis": tfds.features.Text(),
            "label": tfds.features.ClassLabel(
                names=["entailment", "neutral", "contradiction"]
            ),
        }),
        # No default supervised_keys (as we have to pass both premise
        # and hypothesis as input).
        supervised_keys=None,
        homepage="https://www.nyu.edu/projects/bowman/multinli/",
        citation=_CITATION,
    )

  def _split_generators(self, dl_manager):
    downloaded_dir = dl_manager.download_and_extract(
        "https://cims.nyu.edu/~sbowman/multinli/multinli_1.0.zip"
    )
    mnli_path = os.path.join(downloaded_dir, "multinli_1.0")
    train_path = os.path.join(mnli_path, "multinli_1.0_train.txt")
    matched_validation_path = os.path.join(
        mnli_path, "multinli_1.0_dev_matched.txt"
    )
    mismatched_validation_path = os.path.join(
        mnli_path, "multinli_1.0_dev_mismatched.txt"
    )

    return [
        tfds.core.SplitGenerator(
            name=tfds.Split.TRAIN, gen_kwargs={"filepath": train_path}
        ),
        tfds.core.SplitGenerator(
            name="validation_matched",
            gen_kwargs={"filepath": matched_validation_path},
        ),
        tfds.core.SplitGenerator(
            name="validation_mismatched",
            gen_kwargs={"filepath": mismatched_validation_path},
        ),
    ]

  def _generate_examples(self, filepath):
    """Generate mnli examples.

    Args:
      filepath: a string

    Yields:
      dictionaries containing "premise", "hypothesis" and "label" strings
    """
    for idx, line in enumerate(tf.io.gfile.GFile(filepath, "rb")):
      if idx == 0:
        continue  # skip header
      line = tf.compat.as_text(line.strip())
      split_line = line.split("\t")
      # Examples not marked with a three out of five consensus are marked
      # with "-" and should not be used in standard evaluations.
      if split_line[0] == "-":
        continue
      # Works for both splits even though dev has some extra human labels.
      yield idx, {
          "premise": split_line[5],
          "hypothesis": split_line[6],
          "label": split_line[0],
      }
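# Illustrative sketch (not part of the builder above): once registered, a TFDS
# builder is typically loaded by name through the public API. The name
# "multi_nli" is the conventional snake_case form of the class name and is
# assumed here rather than taken from this file.
import tensorflow_datasets as tfds

ds = tfds.load("multi_nli", split="train")
for example in ds.take(1):
  print(example["premise"], example["hypothesis"], example["label"])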
from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import NotFound

from django.core.exceptions import ObjectDoesNotExist

from api.base.views import JSONAPIBaseView
from api.base.filters import ListFilterMixin
from api.base.pagination import NoMaxPageSizePagination
from api.base.parsers import JSONAPIRelationshipParser, JSONAPIRelationshipParserForRegularJSON
from api.base import permissions as base_permissions
from api.subjects.serializers import SubjectSerializer, SubjectsRelationshipSerializer
from api.taxonomies.utils import optimize_subject_query
from osf.models import Subject
from framework.auth.oauth_scopes import CoreScopes


class SubjectMixin(object):
    """Mixin with convenience methods for retrieving the current subject
    based on the current URL. By default, fetches the current subject based
    on the subject_id kwarg.
    """

    subject_lookup_url_kwarg = 'subject_id'

    def get_subject(self, check_object_permissions=True):
        subject_id = self.kwargs[self.subject_lookup_url_kwarg]
        try:
            subject = optimize_subject_query(Subject.objects).get(_id=subject_id)
        except ObjectDoesNotExist:
            raise NotFound

        if check_object_permissions:
            self.check_object_permissions(self.request, subject)

        return subject


class BaseResourceSubjectsList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
    permission_classes = ()

    required_read_scopes = []
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = SubjectSerializer
    model = Subject

    view_category = ''
    view_name = ''

    ordering = ('-id',)

    def get_resource(self):
        raise NotImplementedError()

    def get_queryset(self):
        return self.get_resource().subjects.all()


class SubjectRelationshipBaseView(JSONAPIBaseView, generics.RetrieveUpdateAPIView):
    """Relationship Endpoint for Resource -> Subjects Relationship

    Used to update the subjects on a resource.

    ##Actions

    ###Update

        Method:        PUT || PATCH
        URL:           /links/self
        Query Params:  <none>
        Body (JSON):   {
                         "data": [{
                           "type": "subjects",   # required
                           "id": <subject_id>    # required
                         }]
                       }
        Success:       200

    This requires write permissions on the resource. This will delete subjects
    not listed, meaning a data: [] payload deletes all the subjects.
    """
    serializer_class = SubjectsRelationshipSerializer
    parser_classes = (JSONAPIRelationshipParser, JSONAPIRelationshipParserForRegularJSON, )

    def get_resource(self, check_object_permissions=True):
        raise NotImplementedError()

    def get_object(self):
        resource = self.get_resource(check_object_permissions=False)
        obj = {
            'data': resource.subjects.all(),
            'self': resource,
        }
        self.check_object_permissions(self.request, obj)
        return obj


class SubjectList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = SubjectSerializer
    pagination_class = NoMaxPageSizePagination

    view_category = 'subjects'
    view_name = 'subject-list'

    ordering = ('is_other', '-id',)

    def get_default_queryset(self):
        return optimize_subject_query(Subject.objects.all())

    def get_queryset(self):
        return self.get_queryset_from_request()

    # overrides FilterMixin
    def METHOD_NAME(self, key, field_name, operation):
        if field_name == 'parent':
            if operation['value'] not in (list(), tuple()):
                operation['source_field_name'] = 'parent___id'
            else:
                if len(operation['value']) > 1:
                    operation['source_field_name'] = 'parent___id__in'
                elif len(operation['value']) == 1:
                    operation['source_field_name'] = 'parent___id'
                    operation['value'] = operation['value'][0]
                else:
                    operation['source_field_name'] = 'parent__isnull'
                    operation['value'] = True


class SubjectDetail(JSONAPIBaseView, generics.RetrieveAPIView, SubjectMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/subjects_read).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    serializer_class = SubjectSerializer

    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.NULL]

    view_category = 'subjects'
    view_name = 'subject-detail'

    def get_object(self):
        return self.get_subject()


class SubjectChildrenList(JSONAPIBaseView, generics.ListAPIView, SubjectMixin, ListFilterMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/subject_children_list).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = SubjectSerializer
    pagination_class = NoMaxPageSizePagination

    view_category = 'subjects'
    view_name = 'subject-children'

    ordering = ('-id',)

    def get_default_queryset(self):
        subject = self.get_subject()
        return optimize_subject_query(subject.children.all())

    def get_queryset(self):
        return self.get_queryset_from_request()
#!/usr/bin/env python

"""Script to filter out old warnings from doxygen."""

import sys
import re


class DoxygenWarning(object):
    """Doxygen warning class."""

    def __init__(self, firstline, filename, warning):
        self.firstline = firstline
        self.filename = filename
        self.warning = warning
        self.otherlines = []

    def equals_ignoring_path_and_line_number(self, other):
        """Return true if warnings have same filename and warning message."""
        if self.filename != other.filename:
            return False
        if self.warning != other.warning:
            return False
        return self.otherlines == other.otherlines

    def print_with_prefix(self, prefix):
        print(prefix + self.firstline)
        for line in self.otherlines:
            print(prefix + '    ' + line)


class WarningsList(object):
    """List of Doxygen warnings."""

    def __init__(self, all_lines):
        """Create a list of the warnings in this file."""
        self.warnings_list = []
        current = None
        warning_start_expr = re.compile(r'[^:]+/([^:/]+):\d+: (warning.*$)')
        for line in all_lines:
            if line.isspace():
                continue
            # Allow comments in the list of expected warnings.
            if line.startswith('#'):
                continue
            matched = warning_start_expr.match(line)
            if matched:
                filename = matched.group(1)
                warning = matched.group(2)
                current = DoxygenWarning(line.strip(), filename, warning)
                self.warnings_list.append(current)
            elif line.startswith(' '):
                current.otherlines.append(line.strip())
            else:
                # Warnings are usually of the form [path:line: warning:...]
                # (and the warnings about too many nodes have been filtered out).
                # Treat any unexpected input lines as warnings.
                current = DoxygenWarning(line, '', line)
                self.warnings_list.append(current)

    def METHOD_NAME(self, warning):
        """Check if a similar warning is in this list."""
        for other in self.warnings_list:
            if warning.equals_ignoring_path_and_line_number(other):
                return True
        return False

    def print_warnings_not_in_other_list(self, other, prefix):
        """Check if this is a subset of other, and print anything missing."""
        missing_element_found = False
        for warning in self.warnings_list:
            if not other.METHOD_NAME(warning):
                missing_element_found = True
                warning.print_with_prefix(prefix)
        return missing_element_found


def ignore_too_many_nodes(all_lines):
    """Filter out lines about graphs with too many nodes."""
    too_many_nodes_expr = re.compile(
        r'warning: Include(d by)? graph for .* not generated, too many ' +
        r'nodes( \(\d+\), threshold is 60)?\. Consider increasing ' +
        r'DOT_GRAPH_MAX_NODES\.')
    return [x for x in all_lines if not too_many_nodes_expr.match(x)]


def filter_expected_warnings(expected_warnings_path):
    """Filter lines from stdin and print to stdout."""
    with open(expected_warnings_path, "r") as warnings_file:
        expected_warnings = WarningsList(warnings_file.readlines())
    new_warnings = WarningsList(ignore_too_many_nodes(sys.stdin.readlines()))

    # print unexpected warnings
    unexpected_warning_found = new_warnings.print_warnings_not_in_other_list(
        expected_warnings, '')
    # print expected warnings which aren't found
    expected_warning_not_found = expected_warnings.print_warnings_not_in_other_list(
        new_warnings, '-')
    if expected_warning_not_found:
        print('NOTE: Warnings prefixed with \'-\' are expected ' +
              'warnings which weren\'t found.')
        print('      Please update the list of expected warnings.')
    return unexpected_warning_found or expected_warning_not_found


if __name__ == "__main__":
    if len(sys.argv) != 2:
        print('usage: filter_expected_warnings.py <expected_warnings_file>')
        print('(warnings from stdin are filtered and printed to stdout)')
        sys.exit(1)
    problem_found = filter_expected_warnings(sys.argv[1])
    sys.exit(1 if problem_found else 0)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkdrds.endpoint import endpoint_data


class CreateDrdsInstanceRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Drds', '2019-01-23', 'CreateDrdsInstance', 'drds')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_IsAutoRenew(self):
        return self.get_query_params().get('IsAutoRenew')

    def set_IsAutoRenew(self, IsAutoRenew):
        self.add_query_param('IsAutoRenew', IsAutoRenew)

    def get_ClientToken(self):
        return self.get_query_params().get('ClientToken')

    def set_ClientToken(self, ClientToken):
        self.add_query_param('ClientToken', ClientToken)

    def get_Description(self):
        return self.get_query_params().get('Description')

    def set_Description(self, Description):
        self.add_query_param('Description', Description)

    def get_Type(self):
        return self.get_query_params().get('Type')

    def set_Type(self, Type):
        self.add_query_param('Type', Type)

    def get_Duration(self):
        return self.get_query_params().get('Duration')

    def set_Duration(self, Duration):
        self.add_query_param('Duration', Duration)

    def get_ResourceGroupId(self):
        return self.get_query_params().get('ResourceGroupId')

    def set_ResourceGroupId(self, ResourceGroupId):
        self.add_query_param('ResourceGroupId', ResourceGroupId)

    def get_isHa(self):
        return self.get_query_params().get('isHa')

    def set_isHa(self, isHa):
        self.add_query_param('isHa', isHa)

    def get_MySQLVersion(self):
        return self.get_query_params().get('MySQLVersion')

    def set_MySQLVersion(self, MySQLVersion):
        self.add_query_param('MySQLVersion', MySQLVersion)

    def get_InstanceSeries(self):
        return self.get_query_params().get('InstanceSeries')

    def set_InstanceSeries(self, InstanceSeries):
        self.add_query_param('InstanceSeries', InstanceSeries)

    def get_MasterInstId(self):
        return self.get_query_params().get('MasterInstId')

    def set_MasterInstId(self, MasterInstId):
        self.add_query_param('MasterInstId', MasterInstId)

    def get_Quantity(self):
        return self.get_query_params().get('Quantity')

    def set_Quantity(self, Quantity):
        self.add_query_param('Quantity', Quantity)

    def get_Specification(self):
        return self.get_query_params().get('Specification')

    def set_Specification(self, Specification):
        self.add_query_param('Specification', Specification)

    def get_VswitchId(self):
        return self.get_query_params().get('VswitchId')

    def set_VswitchId(self, VswitchId):
        self.add_query_param('VswitchId', VswitchId)

    def METHOD_NAME(self):
        return self.get_query_params().get('VpcId')

    def set_VpcId(self, VpcId):
        self.add_query_param('VpcId', VpcId)

    def get_ZoneId(self):
        return self.get_query_params().get('ZoneId')

    def set_ZoneId(self, ZoneId):
        self.add_query_param('ZoneId', ZoneId)

    def get_PayType(self):
        return self.get_query_params().get('PayType')

    def set_PayType(self, PayType):
        self.add_query_param('PayType', PayType)

    def get_PricingCycle(self):
        return self.get_query_params().get('PricingCycle')

    def set_PricingCycle(self, PricingCycle):
        self.add_query_param('PricingCycle', PricingCycle)
# -*- coding: utf-8 -*-

import builtins
import json
import unittest

import mock
import pytest
from django.core.exceptions import ValidationError
from nose.tools import *  # noqa: F403 (PEP8 asserts)

from framework.auth import Auth
from osf_tests.factories import (AuthUserFactory, NodeLicenseRecordFactory,
                                 ProjectFactory)
from tests.base import OsfTestCase
from osf.utils.migrations import ensure_licenses
from tests.utils import assert_logs, assert_not_logs
from website import settings
from osf.models.licenses import NodeLicense, serialize_node_license_record, serialize_node_license
from osf.models import NodeLog
from osf.exceptions import NodeStateError

CHANGED_NAME = 'FOO BAR'
CHANGED_TEXT = 'Some good new text'
CHANGED_PROPERTIES = ['foo', 'bar']
LICENSE_TEXT = json.dumps({
    'MIT': {
        'name': CHANGED_NAME,
        'text': CHANGED_TEXT,
        'properties': CHANGED_PROPERTIES
    }
})


class TestNodeLicenses(OsfTestCase):

    def setUp(self):
        super(TestNodeLicenses, self).setUp()

        self.user = AuthUserFactory()
        self.node = ProjectFactory(creator=self.user)
        self.LICENSE_NAME = 'MIT License'
        self.node_license = NodeLicense.objects.get(name=self.LICENSE_NAME)
        self.YEAR = '2105'
        self.COPYRIGHT_HOLDERS = ['Foo', 'Bar']
        self.node.node_license = NodeLicenseRecordFactory(
            node_license=self.node_license,
            year=self.YEAR,
            copyright_holders=self.COPYRIGHT_HOLDERS
        )
        self.node.save()

    def METHOD_NAME(self):
        serialized = serialize_node_license(self.node_license)

        assert_equal(serialized['name'], self.LICENSE_NAME)
        assert_equal(serialized['id'], self.node_license.license_id)
        assert_equal(serialized['text'], self.node_license.text)

    def test_serialize_node_license_record(self):
        serialized = serialize_node_license_record(self.node.node_license)

        assert_equal(serialized['name'], self.LICENSE_NAME)
        assert_equal(serialized['id'], self.node_license.license_id)
        assert_equal(serialized['text'], self.node_license.text)
        assert_equal(serialized['year'], self.YEAR)
        assert_equal(serialized['copyright_holders'], self.COPYRIGHT_HOLDERS)

    def test_serialize_node_license_record_None(self):
        self.node.node_license = None
        serialized = serialize_node_license_record(self.node.node_license)
        assert_equal(serialized, {})

    def test_copy_node_license_record(self):
        record = self.node.node_license
        copied = record.copy()
        assert_is_not_none(copied._id)
        assert_not_equal(record._id, copied._id)
        for prop in ('license_id', 'name', 'node_license'):
            assert_equal(getattr(record, prop), getattr(copied, prop))

    @pytest.mark.enable_implicit_clean
    def test_license_uniqueness_on_id_is_enforced_in_the_database(self):
        NodeLicense(license_id='foo', name='bar', text='baz').save()
        assert_raises(ValidationError, NodeLicense(license_id='foo', name='buz', text='boo').save)

    def test_ensure_licenses_updates_existing_licenses(self):
        assert_equal(ensure_licenses(), (0, 18))

    def test_ensure_licenses_no_licenses(self):
        before_count = NodeLicense.objects.all().count()
        NodeLicense.objects.all().delete()
        assert_false(NodeLicense.objects.all().count())
        ensure_licenses()
        assert_equal(before_count, NodeLicense.objects.all().count())

    def test_ensure_licenses_some_missing(self):
        NodeLicense.objects.get(license_id='LGPL3').delete()
        with assert_raises(NodeLicense.DoesNotExist):
            NodeLicense.objects.get(license_id='LGPL3')
        ensure_licenses()
        found = NodeLicense.objects.get(license_id='LGPL3')
        assert_is_not_none(found)

    def test_ensure_licenses_updates_existing(self):
        with mock.patch.object(builtins, 'open', mock.mock_open(read_data=LICENSE_TEXT)):
            ensure_licenses()
        MIT = NodeLicense.objects.get(license_id='MIT')
        assert_equal(MIT.name, CHANGED_NAME)
        assert_equal(MIT.text, CHANGED_TEXT)
        assert_equal(MIT.properties, CHANGED_PROPERTIES)

    @assert_logs(NodeLog.CHANGED_LICENSE, 'node')
    def test_Node_set_node_license(self):
        GPL3 = NodeLicense.objects.get(license_id='GPL3')
        NEW_YEAR = '2014'
        COPYLEFT_HOLDERS = ['Richard Stallman']
        self.node.set_node_license(
            {
                'id': GPL3.license_id,
                'year': NEW_YEAR,
                'copyrightHolders': COPYLEFT_HOLDERS
            },
            auth=Auth(self.user),
            save=True
        )

        assert_equal(self.node.node_license.license_id, GPL3.license_id)
        assert_equal(self.node.node_license.name, GPL3.name)
        assert_equal(self.node.node_license.copyright_holders, COPYLEFT_HOLDERS)

    @assert_not_logs(NodeLog.CHANGED_LICENSE, 'node')
    def test_Node_set_node_license_invalid(self):
        with assert_raises(NodeStateError):
            self.node.set_node_license(
                {
                    'id': 'SOME ID',
                    'year': 'foo',
                    'copyrightHolders': []
                },
                auth=Auth(self.user)
            )
""" testing models """ from io import BytesIO import pathlib import pytest from dateutil.parser import parse from PIL import Image from django.core.files.base import ContentFile from django.test import TestCase from django.utils import timezone from bookwyrm import models, settings from bookwyrm.models.book import isbn_10_to_13, isbn_13_to_10 from bookwyrm.settings import ENABLE_THUMBNAIL_GENERATION class Book(TestCase): """not too much going on in the books model but here we are""" def setUp(self): """we'll need some books""" self.work = models.Work.objects.create( title="Example Work", remote_id="https://example.com/book/1" ) self.first_edition = models.Edition.objects.create( title="Example Edition", parent_work=self.work ) self.second_edition = models.Edition.objects.create( title="Another Example Edition", parent_work=self.work, ) def test_remote_id(self): """fanciness with remote/origin ids""" remote_id = f"https://{settings.DOMAIN}/book/{self.work.id}" self.assertEqual(self.work.get_remote_id(), remote_id) self.assertEqual(self.work.remote_id, remote_id) def test_generated_links(self): """links produced from identifiers""" book = models.Edition.objects.create( title="ExEd", parent_work=self.work, openlibrary_key="OL123M", inventaire_id="isbn:123", ) self.assertEqual(book.openlibrary_link, "https://openlibrary.org/books/OL123M") self.assertEqual(book.inventaire_link, "https://inventaire.io/entity/isbn:123") def test_create_book_invalid(self): """you shouldn't be able to create Books (only editions and works)""" self.assertRaises(ValueError, models.Book.objects.create, title="Invalid Book") def test_isbn_10_to_13(self): """checksums and so on""" isbn_10 = "178816167X" isbn_13 = isbn_10_to_13(isbn_10) self.assertEqual(isbn_13, "9781788161671") isbn_10 = "1-788-16167-X" isbn_13 = isbn_10_to_13(isbn_10) self.assertEqual(isbn_13, "9781788161671") def test_isbn_13_to_10(self): """checksums and so on""" isbn_13 = "9781788161671" isbn_10 = isbn_13_to_10(isbn_13) self.assertEqual(isbn_10, "178816167X") isbn_13 = "978-1788-16167-1" isbn_10 = isbn_13_to_10(isbn_13) self.assertEqual(isbn_10, "178816167X") def test_get_edition_info(self): """text slug about an edition""" book = models.Edition.objects.create(title="Test Edition") self.assertEqual(book.edition_info, "") book.physical_format = "worm" book.save() self.assertEqual(book.edition_info, "worm") book.languages = ["English"] book.save() self.assertEqual(book.edition_info, "worm") book.languages = ["Glorbish", "English"] book.save() self.assertEqual(book.edition_info, "worm, Glorbish language") book.published_date = timezone.make_aware(parse("2020")) book.save() self.assertEqual(book.edition_info, "worm, Glorbish language, 2020") self.assertEqual(book.alt_text, "Test Edition (worm, Glorbish language, 2020)") def test_get_rank(self): """sets the data quality index for the book""" # basic rank self.assertEqual(self.first_edition.edition_rank, 0) self.first_edition.description = "hi" self.first_edition.save() self.assertEqual(self.first_edition.edition_rank, 1) @pytest.mark.skipif( not ENABLE_THUMBNAIL_GENERATION, reason="Thumbnail generation disabled in settings", ) def METHOD_NAME(self): """Just hit them""" image_file = pathlib.Path(__file__).parent.joinpath( "../../static/images/default_avi.jpg" ) image = Image.open(image_file) output = BytesIO() image.save(output, format=image.format) book = models.Edition.objects.create(title="hello") book.cover.save("test.jpg", ContentFile(output.getvalue())) 
self.assertIsNotNone(book.cover_bw_book_xsmall_webp.url) self.assertIsNotNone(book.cover_bw_book_xsmall_jpg.url) self.assertIsNotNone(book.cover_bw_book_small_webp.url) self.assertIsNotNone(book.cover_bw_book_small_jpg.url) self.assertIsNotNone(book.cover_bw_book_medium_webp.url) self.assertIsNotNone(book.cover_bw_book_medium_jpg.url) self.assertIsNotNone(book.cover_bw_book_large_webp.url) self.assertIsNotNone(book.cover_bw_book_large_jpg.url) self.assertIsNotNone(book.cover_bw_book_xlarge_webp.url) self.assertIsNotNone(book.cover_bw_book_xlarge_jpg.url) self.assertIsNotNone(book.cover_bw_book_xxlarge_webp.url) self.assertIsNotNone(book.cover_bw_book_xxlarge_jpg.url) def test_populate_sort_title(self): """The sort title should remove the initial article on save""" books = ( models.Edition.objects.create( title=f"{article} Test Edition", languages=[langauge] ) for langauge, articles in settings.LANGUAGE_ARTICLES.items() for article in articles ) self.assertTrue(all(book.sort_title == "test edition" for book in books)) def test_repair_edition(self): """Fix editions with no works""" edition = models.Edition.objects.create(title="test") edition.authors.set([models.Author.objects.create(name="Author Name")]) self.assertIsNone(edition.parent_work) edition.repair() edition.refresh_from_db() self.assertEqual(edition.parent_work.title, "test") self.assertEqual(edition.parent_work.authors.count(), 1)
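# Illustrative sketch (not bookwyrm's implementation): the ISBN-10 -> ISBN-13
# conversion exercised by test_isbn_10_to_13 above. An ISBN-13 prefixes "978"
# to the first nine digits of the ISBN-10 and recomputes the check digit with
# alternating 1/3 weights.
def isbn_10_to_13_sketch(isbn_10: str) -> str:
    core = "978" + isbn_10.replace("-", "")[:9]  # drop the old check digit
    total = sum(int(d) * (1 if i % 2 == 0 else 3) for i, d in enumerate(core))
    return core + str((10 - total % 10) % 10)

assert isbn_10_to_13_sketch("178816167X") == "9781788161671"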
from unittest import TestCase

import service
from simulation.avatar.avatar_manager import AvatarManager
from simulation.game_state import GameState
from simulation.interactables.score_location import ScoreLocation
from simulation.location import Location
from simulation.world_map import WorldMap

from .test_simulation.dummy_avatar import MoveEastDummy
from .test_simulation.maps import MockCell, MockPickup
from .test_simulation.mock_game_state import MockGameState

import pytest


@pytest.fixture(scope="module")
def avatar_manager():
    class DummyAvatarManager(AvatarManager):
        avatars = [MoveEastDummy(1, Location(0, -1))]

    return DummyAvatarManager()


@pytest.fixture(scope="module")
def world_state_json(avatar_manager):
    CELLS = [
        [
            {
                "interactable": MockPickup("b"),
                "avatar": avatar_manager.avatars[0],
            },
            {},
            {},
        ],
        [{}, {"habitable": False}, {"interactable": MockPickup("a")}],
    ]
    grid = {
        Location(x, y - 1): MockCell(Location(x, y - 1), **CELLS[x][y])
        for y in range(3)
        for x in range(2)
    }
    grid[Location(0, 1)].interactable = ScoreLocation(grid[Location(0, 1)])
    test_game_state = GameState(WorldMap(grid, {}), avatar_manager)
    return test_game_state.serialize()


def test_correct_json_player_dictionary(world_state_json):
    """
    Ensures the "players" element of the get_game_state() JSON returns the
    correct information for the dummy avatar provided into the world.

    NOTE: Orientation (and others) may be hard coded. This test WILL and
    SHOULD fail if the functionality is added.
    """
    player_list = world_state_json["players"]
    assert len(player_list) == 1
    details = player_list[0]
    assert details["id"] == 1
    assert details["location"]["x"] == 0
    assert details["location"]["y"] == -1
    assert details["orientation"] == "north"


def test_correct_json_score_locations(world_state_json):
    """
    Ensures the correct score location is returned in the "score_locations"
    element of the JSON.
    """
    interactable_list = world_state_json["interactables"]
    for interactable in interactable_list:
        if "ScoreLocation" in interactable:
            assert interactable["location"]["x"] == 0
            assert interactable["location"]["y"] == 1


def test_correct_json_north_east_corner(world_state_json):
    """
    Top right corner of the map must be correct to determine the map size.
    """
    north_east_corner = world_state_json["northEastCorner"]
    assert north_east_corner["x"] == 1
    assert north_east_corner["y"] == 1


def METHOD_NAME(world_state_json):
    """
    Bottom left corner of the map must be correct to determine the map size.
    """
    south_west_corner = world_state_json["southWestCorner"]
    assert south_west_corner["x"] == 0
    assert south_west_corner["y"] == -1


def test_correct_json_era(world_state_json):
    """
    Ensure that the era (for the assets in the frontend) is correct.

    NOTE: This is hard coded right now to "future". This test should fail
    when this functionality is added.
    """
    era = world_state_json["era"]
    assert era == "future"


def test_correct_json_world_interactables_returned_is_correct_amount(world_state_json):
    """
    The JSON returns the correct amount of pickups.
    """
    interactable_list = world_state_json["interactables"]
    assert len(interactable_list) == 3


def test_correct_json_world_obstacles(world_state_json):
    """
    JSON generated must return correct location, width, height, type and
    orientation about obstacles.

    NOTE: Obstacles are highly hard coded right now. Only location changes.
    If any functionality is added, this test WILL and SHOULD fail.
    """
    obstacle_list = world_state_json["obstacles"]
    assert len(obstacle_list) == 1
    assert obstacle_list[0]["location"]["x"] == 1
    assert obstacle_list[0]["location"]["y"] == 0
    assert obstacle_list[0]["texture"] == 1
# Copyright (C) 2015-2022 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accelerator (i.e. GPU) utilities for Toil"""
import os
import subprocess
from typing import Dict, List, Optional, Set, Union, cast
from xml.dom import minidom

from toil.job import AcceleratorRequirement
from toil.lib.memoize import memoize


@memoize
def have_working_nvidia_smi() -> bool:
    """
    Return True if the nvidia-smi binary, from nvidia's CUDA userspace
    utilities, is installed and can be run successfully.

    TODO: This isn't quite the same as the check that cwltool uses to decide
    if it can fulfill a CUDARequirement.
    """
    try:
        subprocess.check_call(['nvidia-smi'])
    except (FileNotFoundError, subprocess.CalledProcessError):
        return False
    return True


@memoize
def METHOD_NAME() -> List[int]:
    """
    Work out what accelerator is what.

    For each accelerator visible to us, returns the host-side (for example,
    outside-of-Slurm-job) number for that accelerator. It is often the same
    as the apparent number.

    Can be used with Docker's --gpus='"device=#,#,#"' option to forward the
    right GPUs as seen from a Docker daemon.
    """
    for number_list_var in ['SLURM_STEP_GPUS', 'SLURM_JOB_GPUS', 'CUDA_VISIBLE_DEVICES', 'NVIDIA_VISIBLE_DEVICES']:
        # Any of these can have a list of GPU numbers, but the CUDA/NVIDIA ones
        # also support a system of GPU GUIDs that we don't support.
        # TODO: If Slurm confinement is set we ignore any attempt to further
        # limit us with the app-level variables. Does that make sense? Writing
        # code to translate through would be hard and probably not actually
        # useful.
        if number_list_var in os.environ:
            device_string = os.environ[number_list_var]
            # Parse all the numbers we have
            device_numbers = [int(part) for part in device_string.split(',') if part.isnumeric()]
            if len(device_numbers) > 0:
                # We found some numbers, so use those
                return device_numbers

    # If we don't see a set of limits we understand, say we have all nvidia GPUs
    return list(range(count_nvidia_gpus()))


@memoize
def have_working_nvidia_docker_runtime() -> bool:
    """
    Return True if Docker exists and can handle an "nvidia" runtime and the
    "--gpus" option.
    """
    try:
        # The runtime injects nvidia-smi; it doesn't seem to have to be in the image we use here
        subprocess.check_call(['docker', 'run', '--rm', '--runtime', 'nvidia', '--gpus', 'all', 'ubuntu:20.04', 'nvidia-smi'])
    except (FileNotFoundError, subprocess.CalledProcessError):
        return False
    return True


@memoize
def count_nvidia_gpus() -> int:
    """
    Return the number of nvidia GPUs seen by nvidia-smi, or 0 if it is not
    working.
    """
    # I don't have nvidia-smi, but cwltool knows how to do this, so we do what
    # they do:
    # <https://github.com/common-workflow-language/cwltool/blob/6f29c59fb1b5426ef6f2891605e8fa2d08f1a8da/cwltool/cuda.py>
    # Some example output is here: <https://gist.github.com/loretoparisi/2620b777562c2dfd50d6b618b5f20867>
    try:
        return int(
            cast(
                minidom.Text,
                minidom.parseString(subprocess.check_output(["nvidia-smi", "-q", "-x"]))
                .getElementsByTagName("attached_gpus")[0]
                .firstChild,
            ).data
        )
    except (
        FileNotFoundError,
        subprocess.CalledProcessError,
        IndexError,
        ValueError,
        PermissionError,
    ):
        return 0

    # TODO: Parse each gpu > product_name > text content and convert to some
    # kind of "model" that agrees with e.g. Kubernetes naming.


@memoize
def get_individual_local_accelerators() -> List[AcceleratorRequirement]:
    """
    Determine all the local accelerators available. Report each with count 1,
    in the order of the number that can be used to assign them.

    TODO: How will numbers work with multiple types of accelerator? We need
    an accelerator assignment API.
    """
    # For now we only know about nvidia GPUs
    return [{'kind': 'gpu', 'brand': 'nvidia', 'api': 'cuda', 'count': 1} for _ in range(count_nvidia_gpus())]


def get_restrictive_environment_for_local_accelerators(accelerator_numbers: Union[Set[int], List[int]]) -> Dict[str, str]:
    """
    Get environment variables which can be applied to a process to restrict it
    to using only the given accelerator numbers.

    The numbers are in the space of accelerators returned by
    get_individual_local_accelerators().
    """
    # Since we only know about nvidia GPUs right now, we can just say our
    # accelerator numbering space is the same as nvidia's GPU numbering space.
    gpu_list = ','.join(str(i) for i in accelerator_numbers)

    # Put this in several places: CUDA_VISIBLE_DEVICES for controlling
    # processes right here, and SINGULARITYENV_CUDA_VISIBLE_DEVICES for
    # propagating to Singularity containers.
    return {'CUDA_VISIBLE_DEVICES': gpu_list, 'SINGULARITYENV_CUDA_VISIBLE_DEVICES': gpu_list}
import logging

import tool_shed.util.shed_util_common as suc
from galaxy import (
    exceptions,
    util,
    web,
)
from galaxy.model.base import transaction
from galaxy.security.validate_user_input import (
    validate_email,
    validate_password,
    validate_publicname,
)
from galaxy.webapps.base.controller import BaseAPIController

log = logging.getLogger(__name__)


class UsersController(BaseAPIController):
    """RESTful controller for interactions with users in the Tool Shed."""

    @web.expose_api
    @web.require_admin
    def create(self, trans, payload, **kwd):
        """
        POST /api/users
        Returns a dictionary of information about the created user.

        :param key: the current Galaxy admin user's API key

        The following parameters are included in the payload.

        :param email (required): the email address of the user
        :param password (required): the password of the user
        :param username (required): the public username of the user
        """
        # Get the information about the user to be created from the payload.
        email = payload.get("email", "")
        password = payload.get("password", "")
        username = payload.get("username", "")
        message = self.__validate(trans, email=email, password=password, confirm=password, username=username)
        if message:
            raise exceptions.RequestParameterInvalidException(message)
        # Create the user.
        user = self.__create_user(trans, email, username, password)
        user_dict = user.to_dict(view="element", value_mapper=self.__get_value_mapper(trans))
        user_dict["message"] = f"User '{str(user.username)}' has been created."
        user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id))
        return user_dict

    def __create_user(self, trans, email, username, password):
        user = trans.app.model.User(email=email)
        user.set_password_cleartext(password)
        user.username = username
        if trans.app.config.user_activation_on:
            user.active = False
        else:
            user.active = True  # Activation is off, every new user is active by default.
        trans.sa_session.add(user)
        with transaction(trans.sa_session):
            trans.sa_session.commit()
        trans.app.security_agent.create_private_user_role(user)
        return user

    def __get_value_mapper(self, trans):
        value_mapper = {"id": trans.security.encode_id}
        return value_mapper

    @web.expose_api_anonymous
    def METHOD_NAME(self, trans, deleted=False, **kwd):
        """
        GET /api/users
        Returns a list of dictionaries that contain information about each user.
        """
        # Example URL: http://localhost:9009/api/users
        user_dicts = []
        deleted = util.asbool(deleted)
        for user in (
            trans.sa_session.query(trans.app.model.User)
            .filter(trans.app.model.User.table.c.deleted == deleted)
            .order_by(trans.app.model.User.table.c.username)
        ):
            user_dict = user.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans))
            user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id))
            user_dicts.append(user_dict)
        return user_dicts

    @web.expose_api_anonymous
    def show(self, trans, id, **kwd):
        """
        GET /api/users/{encoded_user_id}
        GET /api/users/current
        Returns a dictionary of information about a user.

        :param id: the encoded id of the User object.
        """
        user = None
        # user is requesting data about themselves
        user = trans.user if id == "current" else suc.get_user(trans.app, id)
        if user is None:
            user_dict = dict(message=f"Unable to locate user record for id {str(id)}.", status="error")
            return user_dict
        user_dict = user.to_dict(view="element", value_mapper=self.__get_value_mapper(trans))
        user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id))
        return user_dict

    def __validate(self, trans, email, password, confirm, username):
        if username in ["repos"]:
            return f"The term '{username}' is a reserved word in the Tool Shed, so it cannot be used as a public user name."
        message = "\n".join(
            (
                validate_email(trans, email),
                validate_password(trans, password, confirm),
                validate_publicname(trans, username),
            )
        ).rstrip()
        return message
'''
Copyright (C) 2017-2023 Bryant Moscon - [email protected]

Please see the LICENSE file for the terms and conditions
associated with this software.
'''
from collections import defaultdict
from typing import Dict, Tuple
import logging
from decimal import Decimal

from yapic import json

from cryptofeed.connection import AsyncConnection, RestEndpoint, Routes, WebsocketEndpoint
from cryptofeed.defines import ASCENDEX, BID, ASK, BUY, L2_BOOK, SELL, TRADES
from cryptofeed.exceptions import MissingSequenceNumber
from cryptofeed.feed import Feed
from cryptofeed.symbols import Symbol
from cryptofeed.types import Trade, OrderBook

LOG = logging.getLogger('feedhandler')


class AscendEX(Feed):
    id = ASCENDEX
    rest_endpoints = [RestEndpoint('https://ascendex.com', routes=Routes('/api/pro/v1/products'), sandbox='https://api-test.ascendex-sandbox.com')]
    websocket_channels = {
        L2_BOOK: 'depth:',
        TRADES: 'trades:',
    }
    # Docs: https://ascendex.github.io/ascendex-pro-api/#websocket-authentication
    # noinspection PyTypeChecker
    websocket_endpoints = [
        WebsocketEndpoint(
            'wss://ascendex.com/1/api/pro/v1/stream',
            channel_filter=(websocket_channels[L2_BOOK], websocket_channels[TRADES],),
            sandbox='wss://api-test.ascendex-sandbox.com/1/api/pro/v1/stream',
        )
    ]

    @classmethod
    def timestamp_normalize(cls, ts: float) -> float:
        return ts / 1000.0

    @classmethod
    def _parse_symbol_data(cls, data: dict) -> Tuple[Dict, Dict]:
        ret = {}
        info = defaultdict(dict)
        for entry in data['data']:
            # Only "Normal" status symbols are tradeable
            if entry['status'] == 'Normal':
                s = Symbol(entry['baseAsset'], entry['quoteAsset'])
                ret[s.normalized] = entry['symbol']
                info['tick_size'][s.normalized] = entry['tickSize']
                info['instrument_type'][s.normalized] = s.type
        return ret, info

    def __reset(self):
        self._l2_book = {}
        self.seq_no = defaultdict(lambda: None)

    async def _trade(self, msg: dict, timestamp: float):
        """
        {
            'm': 'trades',
            'symbol': 'BTC/USDT',
            'data': [{
                'p': '23169.76',
                'q': '0.00899',
                'ts': 1608760026461,
                'bm': False,
                'seqnum': 72057614186183012
            }]
        }
        """
        for trade in msg['data']:
            t = Trade(
                self.id,
                self.exchange_symbol_to_std_symbol(msg['symbol']),
                SELL if trade['bm'] else BUY,
                Decimal(trade['q']),
                Decimal(trade['p']),
                self.timestamp_normalize(trade['ts']),
                raw=trade
            )
            await self.callback(TRADES, t, timestamp)

    async def _book(self, msg: dict, timestamp: float):
        sequence_number = msg['data']['seqnum']
        pair = self.exchange_symbol_to_std_symbol(msg['symbol'])
        delta = {BID: [], ASK: []}

        if msg['m'] == 'depth-snapshot':
            self.seq_no[pair] = sequence_number
            self._l2_book[pair] = OrderBook(self.id, pair, max_depth=self.max_depth)
        else:
            # ignore messages while we wait for the snapshot
            if self.seq_no[pair] is None:
                return
            if self.seq_no[pair] + 1 != sequence_number:
                raise MissingSequenceNumber
            self.seq_no[pair] = sequence_number

        for side in ('bids', 'asks'):
            for price, amount in msg['data'][side]:
                s = BID if side == 'bids' else ASK
                price = Decimal(price)
                size = Decimal(amount)
                if size == 0:
                    delta[s].append((price, 0))
                    if price in self._l2_book[pair].book[s]:
                        del self._l2_book[pair].book[s][price]
                else:
                    delta[s].append((price, size))
                    self._l2_book[pair].book[s][price] = size

        await self.book_callback(
            L2_BOOK,
            self._l2_book[pair],
            timestamp,
            timestamp=self.timestamp_normalize(msg['data']['ts']),
            raw=msg,
            delta=delta if msg['m'] != 'depth-snapshot' else None,
            sequence_number=sequence_number
        )

    async def message_handler(self, msg: str, conn, timestamp: float):
        msg = json.loads(msg, parse_float=Decimal)

        if 'm' in msg:
            if msg['m'] == 'depth' or msg['m'] == 'depth-snapshot':
                await self._book(msg, timestamp)
            elif msg['m'] == 'trades':
                await self._trade(msg, timestamp)
            elif msg['m'] == 'ping':
                await conn.write('{"op":"pong"}')
            elif msg['m'] == 'connected':
                return
            elif msg['m'] == 'sub':
                return
            else:
                LOG.warning("%s: Invalid message type %s", self.id, msg)
        else:
            LOG.warning("%s: Invalid message type %s", self.id, msg)

    async def subscribe(self, conn: AsyncConnection):
        self.__reset()
        l2_pairs = []

        for channel in self.subscription:
            pairs = self.subscription[channel]
            if channel == "depth:":
                l2_pairs.extend(pairs)
            message = {'op': 'sub', 'ch': channel + ','.join(pairs)}
            await conn.write(json.dumps(message))

        for pair in l2_pairs:
            message = {"op": "req", "action": "depth-snapshot", "args": {"symbol": pair}}
            await conn.write(json.dumps(message))
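
# A minimal wiring sketch for the feed above, following the usual cryptofeed
# FeedHandler pattern (the symbol and the callback are illustrative, not part
# of the original module):
from cryptofeed import FeedHandler
from cryptofeed.defines import TRADES


async def print_trade(trade, receipt_timestamp):
    print(trade)


fh = FeedHandler()
fh.add_feed(AscendEX(symbols=['BTC-USDT'], channels=[TRADES], callbacks={TRADES: print_trade}))
fh.run()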
################################################################################
#    Creme is a free/open-source Customer Relationship Management software
#    Copyright (C) 2014-2022  Hybird
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
################################################################################

from functools import partial

from django.contrib.contenttypes.models import ContentType
from django.db.transaction import atomic
from django.http import HttpResponse
from django.shortcuts import get_object_or_404

from creme import persons
from creme.creme_core.http import CremeJsonResponse
from creme.creme_core.models import EntityFilter
from creme.creme_core.utils import (
    bool_from_str_extended,
    get_from_GET_or_404,
    get_from_POST_or_404,
)
from creme.creme_core.views.generic import CheckedView

from .bricks import _NeighboursMapBrick
from .models import GeoAddress
from .utils import address_as_dict, addresses_from_persons, get_radius

Address = persons.get_address_model()


class AddressInfoSetting(CheckedView):
    permissions = 'persons'

    @atomic
    def post(self, request, *args, **kwargs):
        get_arg = partial(get_from_POST_or_404, request.POST)
        address = get_object_or_404(
            Address.objects.select_for_update(),
            id=get_arg('id', cast=int),
        )

        request.user.has_perm_to_change_or_die(address.owner)

        data = {
            'latitude': get_arg('latitude'),
            'longitude': get_arg('longitude'),
            'geocoded': bool_from_str_extended(get_arg('geocoded')),
            'status': get_arg('status'),
        }

        try:
            address.geoaddress.update(**data)
        except GeoAddress.DoesNotExist:
            GeoAddress.objects.create(address=address, **data)

        return HttpResponse()


class BaseAddressesInformation(CheckedView):
    permissions = 'persons'
    response_class = CremeJsonResponse
    efilter_id_arg = 'id'

    def get_efilter_id(self):
        return self.request.GET.get(self.efilter_id_arg)

    def get_efilter(self):
        filter_id = self.get_efilter_id()
        return get_object_or_404(EntityFilter, pk=filter_id) if filter_id else None

    def get_info(self, request):
        raise NotImplementedError

    def get(self, request, *args, **kwargs):
        return self.response_class(self.get_info(request))


class AddressesInformation(BaseAddressesInformation):
    entity_classes = [
        persons.get_contact_model(),
        persons.get_organisation_model(),
    ]

    def get_info(self, request):
        entity_filter = self.get_efilter()
        user = request.user

        def owner_groups():
            if entity_filter:
                # TODO: assert in self.entity_classes
                model = entity_filter.entity_type.model_class()
                yield entity_filter.filter(model.objects.all(), user=user)
            else:
                for model in self.entity_classes:
                    yield model.objects.all()

        addresses = [
            address
            for owners in owner_groups()
            for address in addresses_from_persons(owners, user)
        ]
        GeoAddress.populate_geoaddresses(addresses)

        return {'addresses': [address_as_dict(address) for address in addresses]}


class NeighboursInformation(BaseAddressesInformation):
    efilter_id_arg = 'filter_id'

    def get_info(self, request):
        GET = request.GET
        source = get_object_or_404(
            Address,
            id=get_from_GET_or_404(GET, 'address_id', int),
        )
        entity_filter = self.get_efilter()

        # TODO: error if value but invalid as float...
        query_distance = GET.get('distance', '')
        distance = float(query_distance) if query_distance.isdigit() else get_radius()

        neighbours = source.geoaddress.neighbours(distance).select_related('address')

        if entity_filter:
            ctype = entity_filter.entity_type

            # Filter owners of neighbours
            owner_ids = entity_filter.filter(
                ctype.get_all_objects_for_this_type(
                    is_deleted=False,
                    pk__in=neighbours.values_list('address__object_id', flat=True),
                )
            ).values_list('pk', flat=True)
            neighbours = neighbours.filter(
                address__content_type=ctype,
                address__object_id__in=owner_ids,
            )
        else:
            # All Contacts & Organisations
            # TODO: get the allowed ContentTypes as GET arguments
            neighbours = neighbours.filter(
                address__content_type__in=map(
                    ContentType.objects.get_for_model,
                    _NeighboursMapBrick.target_ctypes,
                ),
                address__object__is_deleted=False,
            )

        # Filter credentials
        has_perm = request.user.has_perm_to_view
        addresses = [
            address_as_dict(neighbour.address)
            for neighbour in neighbours
            if has_perm(neighbour.address.owner)  # TODO: populate owner ?
        ]

        return {
            'source_address': address_as_dict(source),
            'addresses': addresses,
        }
#!/usr/bin/env python3

# SPDX-FileCopyrightText: 2009 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0

"""
Project:
    glideinWMS
Description:
    unit test for glideinwms/creation/lib/cgWParamDict.py
Author:
    Dennis Box, [email protected]
"""

import os
import sys
import unittest

from unittest import mock

import xmlrunner

from glideinwms.creation.lib import factoryXmlConfig
from glideinwms.creation.lib.cgWParamDict import (
    add_attr_unparsed,
    add_attr_unparsed_real,
    add_file_unparsed,
    calc_monitoring_collectors_string,
    calc_primary_monitoring_collectors,
    get_valid_condor_tarballs,
    iter_to_dict,
    itertools_product,
    old_get_valid_condor_tarballs,
    populate_factory_descript,
    populate_frontend_descript,
    populate_gridmap,
    populate_job_descript,
    validate_condor_tarball_attrs,
)
from glideinwms.creation.lib.cWParamDict import has_file_wrapper, has_file_wrapper_params
from glideinwms.unittests.unittest_utils import balanced_text, TestImportError

try:
    from glideinwms.creation.lib import cgWParamDict
except ImportError as err:
    raise TestImportError(str(err))

XML = "fixtures/factory/glideinWMS.xml"

# We assume that this module is in the unittest directory
module_globals = globals()
unittest_dir = os.path.dirname(os.path.realpath(module_globals["__file__"]))

# Prepending to the PATH to be first in the search
if "GLIDEINWMS_LOCATION" in os.environ:
    os.environ["PATH"] = (
        os.path.join(os.environ["GLIDEINWMS_LOCATION"], "unittests", "fixtures", "bin")
        + os.pathsep
        + os.path.join(os.environ["GLIDEINWMS_LOCATION"], "creation")
        + os.pathsep
        + os.environ["PATH"]
    )
else:
    os.environ["PATH"] = (
        os.path.join(unittest_dir, "fixtures", "bin")
        + os.pathsep
        + os.path.join(unittest_dir, "../creation")
        + os.pathsep
        + os.environ["PATH"]
    )


class TestGlideinDicts(unittest.TestCase):
    def setUp(self):
        self.conf = factoryXmlConfig.parse(XML)
        self.cgpd = cgWParamDict.glideinDicts(self.conf)
        self.cgpd.populate()

    def test_init(self):
        self.assertTrue(isinstance(self.cgpd, cgWParamDict.glideinDicts))

    def test_submit_files_ok(self):
        work_dir = self.cgpd.work_dir
        for item in self.cgpd.sub_list:
            entry = "entry_%s" % item
            condir = os.path.join(work_dir, entry)
            confile = os.path.join(condir, "job.condor")
            self.assertTrue(os.path.exists(confile), "%s not found!" % confile)
            with open(confile) as cf:
                data = cf.readlines()
            rslt = balanced_text(data)
            self.assertEqual("Balanced", rslt, f"{rslt} {confile}")

    def test_new_MainDicts(self):
        nmd = self.cgpd.new_MainDicts()
        self.assertTrue(isinstance(nmd, cgWParamDict.glideinMainDicts))

    def test_new_SubDicts(self):
        nsd = self.cgpd.new_SubDicts("entry_osg34_el7")
        self.assertTrue(isinstance(nsd, cgWParamDict.glideinEntryDicts))

    def test_save(self):
        self.cgpd.save()

    def test_save_pub_key(self):
        nmd = self.cgpd.new_MainDicts()
        nmd.save_pub_key()

    def test_save_monitor(self):
        nmd = self.cgpd.new_MainDicts()
        nmd.save_monitor()

    def test_MainDicts_populate(self):
        nmd = self.cgpd.new_MainDicts()
        nmd.populate()

    def test_reuse(self):
        nmd = self.cgpd.new_MainDicts()
        self.cgpd.main_dicts.reuse(nmd)

    def test_has_file_wrapper(self):
        self.assertEqual(False, has_file_wrapper(self.cgpd.main_dicts))


class TestAddFileUnparsed(unittest.TestCase):
    @unittest.skip("for now")
    def test_add_file_unparsed(self):
        # self.assertEqual(expected, add_file_unparsed(user_file, dicts, is_factory))
        assert False  # TODO: implement your test here


class TestAddAttrUnparsed(unittest.TestCase):
    @unittest.skip("for now")
    def test_add_attr_unparsed(self):
        # self.assertEqual(expected, add_attr_unparsed(attr, dicts, description))
        assert False  # TODO: implement your test here


class TestAddAttrUnparsedReal(unittest.TestCase):
    @unittest.skip("for now")
    def test_add_attr_unparsed_real(self):
        # self.assertEqual(expected, add_attr_unparsed_real(attr, dicts))
        assert False  # TODO: implement your test here


class TestIterToDict(unittest.TestCase):
    @unittest.skip("for now")
    def test_iter_to_dict(self):
        # self.assertEqual(expected, iter_to_dict(dictObject))
        assert False  # TODO: implement your test here


class TestPopulateFactoryDescript(unittest.TestCase):
    @unittest.skip("for now")
    def test_populate_factory_descript(self):
        # self.assertEqual(
        #     expected,
        #     populate_factory_descript(
        #         work_dir,
        #         glidein_dict,
        #         active_sub_list,
        #         disabled_sub_list,
        #         conf))
        assert False  # TODO: implement your test here


class TestPopulateJobDescript(unittest.TestCase):
    @unittest.skip("for now")
    def test_populate_job_descript(self):
        # self.assertEqual(
        #     expected,
        #     populate_job_descript(
        #         work_dir,
        #         job_descript_dict,
        #         sub_name,
        #         entry,
        #         schedd))
        assert False  # TODO: implement your test here


class TestPopulateFrontendDescript(unittest.TestCase):
    @unittest.skip("for now")
    def test_populate_frontend_descript(self):
        # self.assertEqual(expected, populate_frontend_descript(frontend_dict, conf))
        assert False  # TODO: implement your test here


class TestPopulateGridmap(unittest.TestCase):
    @unittest.skip("for now")
    def test_populate_gridmap(self):
        # self.assertEqual(expected, populate_gridmap(conf, gridmap_dict))
        assert False  # TODO: implement your test here


class TestValidateCondorTarballAttrs(unittest.TestCase):
    @unittest.skip("for now")
    def test_validate_condor_tarball_attrs(self):
        # self.assertEqual(expected, validate_condor_tarball_attrs(conf))
        assert False  # TODO: implement your test here


class TestOldGetValidCondorTarballs(unittest.TestCase):
    @unittest.skip("for now")
    def test_old_get_valid_condor_tarballs(self):
        # self.assertEqual(expected, old_get_valid_condor_tarballs(params))
        assert False  # TODO: implement your test here


class TestGetValidCondorTarballs(unittest.TestCase):
    @unittest.skip("for now")
    def test_get_valid_condor_tarballs(self):
        # self.assertEqual(expected, get_valid_condor_tarballs(condor_tarballs))
        assert False  # TODO: implement your test here


class TestItertoolsProduct(unittest.TestCase):
    @unittest.skip("for now")
    def test_itertools_product(self):
        # self.assertEqual(expected, itertools_product(*args, **kwds))
        assert False  # TODO: implement your test here


class TestCalcMonitoringCollectorsString(unittest.TestCase):
    @unittest.skip("for now")
    def test_calc_monitoring_collectors_string(self):
        # self.assertEqual(expected, calc_monitoring_collectors_string(collectors))
        assert False  # TODO: implement your test here


class TestCalcPrimaryMonitoringCollectors(unittest.TestCase):
    @unittest.skip("for now")
    def test_calc_primary_monitoring_collectors(self):
        # self.assertEqual(expected, calc_primary_monitoring_collectors(collectors))
        assert False  # TODO: implement your test here


if __name__ == "__main__":
    unittest.main(testRunner=xmlrunner.XMLTestRunner(output="unittests-reports"))
# coding=utf-8
# Copyright 2023 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for training utilities used in the ViT experiments."""

from absl.testing import absltest
from absl.testing import parameterized
import jax
import numpy as np
import train_utils  # local file import from baselines.jft


class TrainUtilsTest(parameterized.TestCase):

  def test_sigmoid_xent(self):
    key = jax.random.PRNGKey(42)
    key1, key2 = jax.random.split(key)
    n = 5
    logits = jax.random.normal(key1, shape=(n,))
    labels = jax.random.bernoulli(key2, shape=(n,))
    expected_loss = 5.22126
    actual_loss = train_utils.sigmoid_xent(logits=logits, labels=labels)
    np.testing.assert_allclose(actual_loss, expected_loss, rtol=1e-06,
                               atol=1e-06)

  def test_softmax_xent(self):
    key = jax.random.PRNGKey(42)
    key1, key2 = jax.random.split(key)
    n = 5
    k = 3
    logits = jax.random.normal(key1, shape=(n, k))
    labels = jax.nn.one_hot(
        jax.random.randint(key2, shape=(n,), minval=0, maxval=k - 1),
        num_classes=k)
    expected_loss = 2.55749
    actual_loss = train_utils.sigmoid_xent(logits=logits, labels=labels)
    np.testing.assert_allclose(actual_loss, expected_loss, rtol=1e-06,
                               atol=1e-06)

  def test_accumulate_gradient(self):
    # TODO(dusenberrymw): Add a test for this.
    pass

  def test_create_learning_rate_schedule(self):
    total_steps = 10
    base = 0.1
    decay_type = "linear"
    warmup_steps = 2
    linear_end = 1e-4
    lr_fn = train_utils.create_learning_rate_schedule(
        total_steps,
        base=base,
        decay_type=decay_type,
        warmup_steps=warmup_steps,
        linear_end=linear_end)
    expected_lrs = [
        0.0, 0.05000000074505806, 0.10000000149011612, 0.08751250058412552,
        0.07502499967813492
    ]
    actual_lrs = [float(lr_fn(i)) for i in range(5)]
    np.testing.assert_allclose(actual_lrs, expected_lrs)

    decay_type = "cosine"
    expected_lrs = [0., 0.05, 0.1, 0.087513, 0.075025]
    actual_lrs = [float(lr_fn(i)) for i in range(5)]
    np.testing.assert_allclose(actual_lrs, expected_lrs, rtol=1e-06,
                               atol=1e-06)

  @parameterized.parameters(
      dict(weight_decay_rules=[],
           rescale_value=1.,
           learning_rate=1.,
           input_params={"bias": 1., "kernel": 2.},
           expected_decayed_params={"bias": 1., "kernel": 2.}),
      dict(weight_decay_rules=1.,
           rescale_value=1.,
           learning_rate=.5,
           input_params={"bias": 1., "kernel": 2.},
           expected_decayed_params={"bias": 1., "kernel": 1.}),
      dict(weight_decay_rules=[(".*b.*", .5)],
           rescale_value=1.,
           learning_rate=1.,
           input_params={"bias": 1., "kernel": 2.},
           expected_decayed_params={"bias": 0.5, "kernel": 2.}),
      dict(weight_decay_rules=[(".*kernel.*", .5), (".*bias.*", 2.)],
           rescale_value=2.,
           learning_rate=1.,
           input_params={"bias": 1., "kernel": 2.},
           expected_decayed_params={"bias": 0., "kernel": 1.5}),
  )
  def test_get_weight_decay_fn(self, weight_decay_rules, rescale_value,
                               learning_rate, input_params,
                               expected_decayed_params):
    weight_decay_fn = train_utils.get_weight_decay_fn(weight_decay_rules,
                                                      rescale_value)
    actual_decayed_params = weight_decay_fn(input_params, learning_rate)
    actual_leaves = jax.tree_util.tree_leaves(actual_decayed_params)
    expected_leaves = jax.tree_util.tree_leaves(expected_decayed_params)
    for actual_arr, expected_arr in zip(actual_leaves, expected_leaves):
      np.testing.assert_allclose(actual_arr, expected_arr)

  def test_tree_map_with_regex(self):
    d = {"this": 1, "that": {"another": 2, "wow": 3, "cool": {"neat": 4}}}
    f = lambda x, _: x + 1

    regex_rules = [(".*anot.*", 1)]
    mapped_d_expected = {
        "this": 1,
        "that": {
            "another": 3,
            "wow": 3,
            "cool": {
                "neat": 4
            }
        }
    }
    mapped_d_actual = train_utils.tree_map_with_regex(f, d, regex_rules)
    self.assertEqual(mapped_d_actual, mapped_d_expected)

    regex_rules = [(".*that.*", 1)]
    mapped_d_expected = {
        "this": 1,
        "that": {
            "another": 3,
            "wow": 4,
            "cool": {
                "neat": 5
            }
        }
    }
    mapped_d_actual = train_utils.tree_map_with_regex(f, d, regex_rules)
    self.assertEqual(mapped_d_actual, mapped_d_expected)

  def test_itstime(self):
    self.assertTrue(
        train_utils.itstime(
            step=1, every_n_steps=2, total_steps=4, last=True, first=True))
    self.assertTrue(
        train_utils.itstime(
            step=1, every_n_steps=2, total_steps=4, last=False, first=True))
    self.assertTrue(
        train_utils.itstime(
            step=2, every_n_steps=2, total_steps=4, last=True, first=True))
    self.assertTrue(
        train_utils.itstime(
            step=4, every_n_steps=2, total_steps=4, last=True, first=True))
    self.assertTrue(
        train_utils.itstime(
            step=4, every_n_steps=2, total_steps=4, last=True, first=False))
    self.assertFalse(
        train_utils.itstime(
            step=1, every_n_steps=2, total_steps=4, last=True, first=False))
    self.assertFalse(
        train_utils.itstime(
            step=4, every_n_steps=3, total_steps=4, last=False, first=True))


if __name__ == "__main__":
  absltest.main()
# coding=utf-8
# Copyright 2018-2023 EvaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest

import mock
import pytest
from mock import ANY, MagicMock

from evadb.catalog.catalog_manager import CatalogManager
from evadb.catalog.catalog_type import ColumnType, TableType
from evadb.catalog.catalog_utils import get_video_table_column_definitions
from evadb.catalog.models.column_catalog import ColumnCatalogEntry
from evadb.catalog.models.function_catalog import FunctionCatalogEntry
from evadb.parser.table_ref import TableInfo
from evadb.parser.types import FileFormatType


@pytest.mark.notparallel
class CatalogManagerTests(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @classmethod
    def setUpClass(cls) -> None:
        cls.mocks = [
            mock.patch("evadb.catalog.catalog_manager.SQLConfig"),
            mock.patch("evadb.catalog.catalog_manager.init_db"),
        ]
        for single_mock in cls.mocks:
            single_mock.start()
            cls.addClassCleanup(single_mock.stop)

    @mock.patch("evadb.catalog.catalog_manager.init_db")
    def test_catalog_bootstrap(self, mocked_db):
        x = CatalogManager(MagicMock(), MagicMock())
        x._bootstrap_catalog()
        mocked_db.assert_called()

    @mock.patch(
        "evadb.catalog.catalog_manager.CatalogManager.create_and_insert_table_catalog_entry"
    )
    def test_create_multimedia_table_catalog_entry(self, mock):
        x = CatalogManager(MagicMock(), MagicMock())
        name = "myvideo"
        x.create_and_insert_multimedia_table_catalog_entry(
            name=name, format_type=FileFormatType.VIDEO
        )

        columns = get_video_table_column_definitions()

        mock.assert_called_once_with(
            TableInfo(name),
            columns,
            table_type=TableType.VIDEO_DATA,
        )

    @mock.patch("evadb.catalog.catalog_manager.init_db")
    @mock.patch("evadb.catalog.catalog_manager.TableCatalogService")
    def test_insert_table_catalog_entry_should_create_table_and_columns(
        self, ds_mock, initdb_mock
    ):
        catalog = CatalogManager(MagicMock(), MagicMock())
        file_url = "file1"
        table_name = "name"

        columns = [(ColumnCatalogEntry("c1", ColumnType.INTEGER))]

        catalog.insert_table_catalog_entry(table_name, file_url, columns)
        ds_mock.return_value.insert_entry.assert_called_with(
            table_name,
            file_url,
            identifier_column="id",
            table_type=TableType.VIDEO_DATA,
            column_list=[ANY] + columns,
        )

    @mock.patch("evadb.catalog.catalog_manager.init_db")
    @mock.patch("evadb.catalog.catalog_manager.TableCatalogService")
    def test_get_table_catalog_entry_when_table_exists(self, ds_mock, initdb_mock):
        catalog = CatalogManager(MagicMock(), MagicMock())
        table_name = "name"
        database_name = "database"
        row_id = 1
        table_obj = MagicMock(row_id=row_id)
        ds_mock.return_value.get_entry_by_name.return_value = table_obj

        actual = catalog.get_table_catalog_entry(
            table_name,
            database_name,
        )
        ds_mock.return_value.get_entry_by_name.assert_called_with(
            database_name, table_name
        )
        self.assertEqual(actual.row_id, row_id)

    @mock.patch("evadb.catalog.catalog_manager.init_db")
    @mock.patch("evadb.catalog.catalog_manager.TableCatalogService")
    @mock.patch("evadb.catalog.catalog_manager.ColumnCatalogService")
    def test_get_table_catalog_entry_when_table_doesnot_exists(
        self, dcs_mock, ds_mock, initdb_mock
    ):
        catalog = CatalogManager(MagicMock(), MagicMock())
        table_name = "name"
        database_name = "database"
        table_obj = None
        ds_mock.return_value.get_entry_by_name.return_value = table_obj

        actual = catalog.get_table_catalog_entry(table_name, database_name)
        ds_mock.return_value.get_entry_by_name.assert_called_with(
            database_name, table_name
        )
        dcs_mock.return_value.filter_entries_by_table_id.assert_not_called()
        self.assertEqual(actual, table_obj)

    @mock.patch("evadb.catalog.catalog_manager.FunctionCatalogService")
    @mock.patch("evadb.catalog.catalog_manager.FunctionIOCatalogService")
    @mock.patch("evadb.catalog.catalog_manager.FunctionMetadataCatalogService")
    @mock.patch("evadb.catalog.catalog_manager.get_file_checksum")
    def test_insert_function(
        self, checksum_mock, functionmetadata_mock, functionio_mock, function_mock
    ):
        catalog = CatalogManager(MagicMock(), MagicMock())
        function_io_list = [MagicMock()]
        function_metadata_list = [MagicMock()]
        actual = catalog.insert_function_catalog_entry(
            "function",
            "sample.py",
            "classification",
            function_io_list,
            function_metadata_list,
        )
        functionio_mock.return_value.insert_entries.assert_called_with(function_io_list)
        functionmetadata_mock.return_value.insert_entries.assert_called_with(
            function_metadata_list
        )
        function_mock.return_value.insert_entry.assert_called_with(
            "function", "sample.py", "classification", checksum_mock.return_value
        )
        checksum_mock.assert_called_with("sample.py")
        self.assertEqual(actual, function_mock.return_value.insert_entry.return_value)

    @mock.patch("evadb.catalog.catalog_manager.FunctionCatalogService")
    def test_get_function_catalog_entry_by_name(self, function_mock):
        catalog = CatalogManager(MagicMock(), MagicMock())
        actual = catalog.get_function_catalog_entry_by_name("name")
        function_mock.return_value.get_entry_by_name.assert_called_with("name")
        self.assertEqual(
            actual, function_mock.return_value.get_entry_by_name.return_value
        )

    @mock.patch("evadb.catalog.catalog_manager.FunctionCatalogService")
    def test_delete_function(self, function_mock):
        CatalogManager(MagicMock(), MagicMock()).delete_function_catalog_entry_by_name(
            "name"
        )
        function_mock.return_value.delete_entry_by_name.assert_called_with("name")

    @mock.patch("evadb.catalog.catalog_manager.FunctionIOCatalogService")
    def test_get_function_outputs(self, function_mock):
        mock_func = function_mock.return_value.get_output_entries_by_function_id
        function_obj = MagicMock(spec=FunctionCatalogEntry)
        CatalogManager(MagicMock(), MagicMock()).get_function_io_catalog_output_entries(
            function_obj
        )
        mock_func.assert_called_once_with(function_obj.row_id)

    @mock.patch("evadb.catalog.catalog_manager.FunctionIOCatalogService")
    def test_get_function_inputs(self, function_mock):
        mock_func = function_mock.return_value.get_input_entries_by_function_id
        function_obj = MagicMock(spec=FunctionCatalogEntry)
        CatalogManager(MagicMock(), MagicMock()).get_function_io_catalog_input_entries(
            function_obj
        )
        mock_func.assert_called_once_with(function_obj.row_id)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkadb.endpoint import endpoint_data


class ModifyElasticPlanRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'adb', '2019-03-15', 'ModifyElasticPlan', 'ads')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):  # Long
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self, ResourceOwnerId):  # Long
        self.add_query_param('ResourceOwnerId', ResourceOwnerId)

    def get_ElasticPlanType(self):  # String
        return self.get_query_params().get('ElasticPlanType')

    def set_ElasticPlanType(self, ElasticPlanType):  # String
        self.add_query_param('ElasticPlanType', ElasticPlanType)

    def get_ElasticPlanTimeStart(self):  # String
        return self.get_query_params().get('ElasticPlanTimeStart')

    def set_ElasticPlanTimeStart(self, ElasticPlanTimeStart):  # String
        self.add_query_param('ElasticPlanTimeStart', ElasticPlanTimeStart)

    def get_ElasticPlanEndDay(self):  # String
        return self.get_query_params().get('ElasticPlanEndDay')

    def set_ElasticPlanEndDay(self, ElasticPlanEndDay):  # String
        self.add_query_param('ElasticPlanEndDay', ElasticPlanEndDay)

    def get_ElasticPlanWeeklyRepeat(self):  # String
        return self.get_query_params().get('ElasticPlanWeeklyRepeat')

    def set_ElasticPlanWeeklyRepeat(self, ElasticPlanWeeklyRepeat):  # String
        self.add_query_param('ElasticPlanWeeklyRepeat', ElasticPlanWeeklyRepeat)

    def get_ElasticPlanWorkerSpec(self):  # String
        return self.get_query_params().get('ElasticPlanWorkerSpec')

    def set_ElasticPlanWorkerSpec(self, ElasticPlanWorkerSpec):  # String
        self.add_query_param('ElasticPlanWorkerSpec', ElasticPlanWorkerSpec)

    def get_ElasticPlanEnable(self):  # Boolean
        return self.get_query_params().get('ElasticPlanEnable')

    def set_ElasticPlanEnable(self, ElasticPlanEnable):  # Boolean
        self.add_query_param('ElasticPlanEnable', ElasticPlanEnable)

    def get_ElasticPlanTimeEnd(self):  # String
        return self.get_query_params().get('ElasticPlanTimeEnd')

    def set_ElasticPlanTimeEnd(self, ElasticPlanTimeEnd):  # String
        self.add_query_param('ElasticPlanTimeEnd', ElasticPlanTimeEnd)

    def get_ElasticPlanStartDay(self):  # String
        return self.get_query_params().get('ElasticPlanStartDay')

    def set_ElasticPlanStartDay(self, ElasticPlanStartDay):  # String
        self.add_query_param('ElasticPlanStartDay', ElasticPlanStartDay)

    def get_ResourceOwnerAccount(self):  # String
        return self.get_query_params().get('ResourceOwnerAccount')

    def set_ResourceOwnerAccount(self, ResourceOwnerAccount):  # String
        self.add_query_param('ResourceOwnerAccount', ResourceOwnerAccount)

    def get_DBClusterId(self):  # String
        return self.get_query_params().get('DBClusterId')

    def set_DBClusterId(self, DBClusterId):  # String
        self.add_query_param('DBClusterId', DBClusterId)

    def get_OwnerAccount(self):  # String
        return self.get_query_params().get('OwnerAccount')

    def set_OwnerAccount(self, OwnerAccount):  # String
        self.add_query_param('OwnerAccount', OwnerAccount)

    def get_OwnerId(self):  # Long
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self, OwnerId):  # Long
        self.add_query_param('OwnerId', OwnerId)

    def get_ElasticPlanName(self):  # String
        return self.get_query_params().get('ElasticPlanName')

    def set_ElasticPlanName(self, ElasticPlanName):  # String
        self.add_query_param('ElasticPlanName', ElasticPlanName)

    def get_ResourcePoolName(self):  # String
        return self.get_query_params().get('ResourcePoolName')

    def set_ResourcePoolName(self, ResourcePoolName):  # String
        self.add_query_param('ResourcePoolName', ResourcePoolName)

    def get_ElasticPlanNodeNum(self):  # Integer
        return self.get_query_params().get('ElasticPlanNodeNum')

    def set_ElasticPlanNodeNum(self, ElasticPlanNodeNum):  # Integer
        self.add_query_param('ElasticPlanNodeNum', ElasticPlanNodeNum)
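
# A minimal invocation sketch for the request class above, using the standard
# aliyun-python-sdk-core client. Credentials, region, and all parameter values
# below are placeholders, not real identifiers:
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')

request = ModifyElasticPlanRequest()
request.set_DBClusterId('am-xxxxxxxxxxxxxxxx')
request.set_ElasticPlanName('daily-peak')
request.set_ElasticPlanEnable(True)

# Returns the raw response body (bytes) or raises on error.
response = client.do_action_with_exception(request)
print(response)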
# Copyright 2019 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from datetime import timedelta

from odoo import api, fields

from odoo.addons.shopinvader.tests.test_notification import CommonCase


class TestSaleOrder(CommonCase):
    """
    Tests for sale.order
    """

    def setUp(self):
        super().setUp()
        self.sale_obj = self.env["sale.order"]
        self.sale = self.env.ref("shopinvader.sale_order_2")
        self.template = self.env.ref(
            "shopinvader_pending_cart_reminder."
            "mail_template_shopinvader_sale_reminder"
        )
        self.backend.write(
            {
                "pending_cart_reminder_delay": 1,
                "pending_cart_reminder_template_id": self.template.id,
            }
        )
        self.days_to_add = 0

    def _patch_get_pending_cart_last_write_dt(self):
        """
        :return: function
        """
        days_to_add = self.days_to_add

        @api.model
        def _get_pending_cart_last_write_dt(self, backend):
            reminder_date = fields.Datetime.from_string(fields.Datetime.now())
            if days_to_add:
                reminder_date -= timedelta(days=days_to_add)
            return reminder_date

        return _get_pending_cart_last_write_dt

    def _patch_sale_reminder(self):
        """
        Do the patch (and add the cleanup)
        :return: bool
        """
        _get_reminder_date = self._patch_get_pending_cart_last_write_dt()
        self.sale_obj._patch_method(
            "_get_pending_cart_last_write_dt", _get_reminder_date
        )
        self.addCleanup(self.sale_obj._revert_method, "_get_pending_cart_last_write_dt")
        return True

    def _check_reminder_empty(self):
        """
        Ensure the pending_cart_reminder_sent_dt is not set
        :return: bool
        """
        self.assertFalse(self.sale.pending_cart_reminder_sent_dt)
        return True

    def _launch_and_check_no_changes(self):
        """
        Ensure no changes after launching the reminder
        :return: bool
        """
        values_before = self.sale.read()[0]
        self._patch_sale_reminder()
        self.sale_obj.launch_pending_cart_reminder()
        values_after = self.sale.read()[0]
        self.assertDictEqual(values_after, values_before)
        return True

    def test_reminder1(self):
        """
        Test the reminder.
        For this case, the sale should have a reminder.
        :return:
        """
        self._check_reminder_empty()
        now = fields.Datetime.from_string(fields.Datetime.now())
        self._patch_sale_reminder()
        self.sale_obj.launch_pending_cart_reminder()
        self.assertGreaterEqual(
            fields.Datetime.from_string(self.sale.pending_cart_reminder_sent_dt),
            now,
        )
        return

    def test_reminder2(self):
        """
        Test the reminder.
        For this case, the sale shouldn't have a reminder.
        :return:
        """
        self._check_reminder_empty()
        self.days_to_add = 4
        self._patch_sale_reminder()
        self.sale_obj.launch_pending_cart_reminder()
        self._check_reminder_empty()
        return

    def test_reminder3(self):
        """
        Test the reminder.
        For this case, the sale already has a reminder and shouldn't be updated.
        :return:
        """
        now = fields.Datetime.now()
        self.sale.write({"pending_cart_reminder_sent_dt": now})
        self._patch_sale_reminder()
        self.sale_obj.launch_pending_cart_reminder()
        self.assertEqual(self.sale.pending_cart_reminder_sent_dt, now)
        return

    def test_reminder4(self):
        """
        Test the reminder.
        For this case, the sale is not a cart (but a "normal" sale).
        :return:
        """
        self.sale.write({"typology": "sale"})
        self._launch_and_check_no_changes()
        return

    def test_reminder5(self):
        """
        Test the reminder.
        For this case, the partner of the sale is the anonymous user (so no email).
        :return:
        """
        self.sale.write({"partner_id": self.backend.anonymous_partner_id.id})
        self._launch_and_check_no_changes()
        return
# -*- coding: utf-8 -*-
"""Handle app url related tests.

Copyright (C) 2021 Gitcoin Core

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
from secrets import token_hex

from django.urls import resolve, reverse

from test_plus.test import TestCase


class AppUrlsTestCase(TestCase):
    """Define tests for app urls."""

    def setUp(self):
        self.user = self.make_user()

    def test_robotstxt_reverse(self):
        """Test the robotstxt url and check the reverse."""
        self.assertEqual(reverse('robotstxt'), '/robots.txt')

    def test_robotstxt_resolve(self):
        """Test the robotstxt url and check the resolution."""
        self.assertEqual(resolve('/robots.txt').view_name, 'robotstxt')
        self.assertEqual(resolve('/robots.txt/').view_name, 'robotstxt')

    def test_sitemap_reverse(self):
        """Test the sitemap url and check the reverse."""
        self.assertEqual(reverse('django.contrib.sitemaps.views.index'), '/sitemap.xml')

    def test_sitemap_resolve(self):
        """Test the sitemap url and check the resolution."""
        self.assertEqual(resolve('/sitemap.xml').view_name, 'django.contrib.sitemaps.views.index')

    def test_email_settings_reverse(self):
        """Test the email_settings url and check the reverse."""
        priv_key = token_hex(16)[:29]
        self.assertEqual(reverse('email_settings', args=(priv_key, )), f'/settings/email/{priv_key}')

    def test_email_settings_resolve(self):
        """Test the email_settings url and check the resolution."""
        self.assertEqual(resolve('/settings/email/').view_name, 'email_settings')

    def test_leaderboard_reverse(self):
        """Test the leaderboard url and check the reverse."""
        self.assertEqual(reverse('leaderboard', args=('quarterly_earners', )), '/leaderboard/quarterly_earners')

    def test_leaderboard_resolve(self):
        """Test the leaderboard url and check the resolution."""
        self.assertEqual(resolve('/leaderboard/').view_name, 'leaderboard')

    def test__leaderboard_reverse(self):
        """Test the _leaderboard url and check the reverse."""
        self.assertEqual(reverse('_leaderboard'), '/leaderboard')

    def test__leaderboard_resolve(self):
        """Test the _leaderboard url and check the resolution."""
        self.assertEqual(resolve('/leaderboard').view_name, '_leaderboard')

    def test_stats_reverse(self):
        """Test the stats url and check the reverse."""
        self.assertEqual(reverse('stats'), '/_administration/stats/')

    def test_stats_resolve(self):
        """Test the stats url and check the resolution."""
        self.assertEqual(resolve('/_administration/stats/').view_name, 'stats')

    def test_explorer_reverse(self):
        """Test the explorer url and check the reverse."""
        self.assertEqual(reverse('explorer'), '/explorer')

    def test_explorer_resolve(self):
        """Test the explorer url and check the resolution."""
        self.assertEqual(resolve('/explorer').view_name, 'explorer')
        self.assertEqual(resolve('/explorer/').view_name, 'explorer')

    def test_new_bounty_reverse(self):
        """Test the new_bounty url and check the reverse."""
        self.assertEqual(reverse('new_bounty'), '/bounty/new')

    def test_new_bounty_resolve(self):
        """Test the new_bounty url and check the resolution."""
        self.assertEqual(resolve('/bounty/new').view_name, 'new_bounty')
        self.assertEqual(resolve('/bounty/new/').view_name, 'new_bounty')

    def test_uniterested_reverse(self):
        """Test the uninterested url and check the reverse."""
        self.assertEqual(reverse('uninterested', args=[1, 2]), '/actions/bounty/1/interest/2/uninterested/')

    def test_uniterested_resolve(self):
        """Test the uninterested url and check the resolution."""
        self.assertEqual(resolve('/actions/bounty/1/interest/2/uninterested/').view_name, 'uninterested')
# -*- coding: utf-8 -*-
from flare import html5
from flare.button import Button
from flare.popup import Confirm
from flare.network import NetworkService
from vi.priorityqueue import actionDelegateSelector
from vi.config import conf
from flare.i18n import translate


# ShopMarkAction ---------------------------------------------------------------------------------

class ShopMarkAction(Button):
    def __init__(self, action, title, cls="", txtQuestion=None, txtSuccess=None, txtFailure=None, *args, **kwargs):
        super(ShopMarkAction, self).__init__(translate(title))
        #self["class"] = "icon order_markpayed"
        self["disabled"] = True
        self.isDisabled = True

        self.action = action
        self.txtQuestion = txtQuestion
        self.txtSuccess = txtSuccess
        self.txtFailure = txtFailure

        self.done = 0
        self.failed = 0
        self.total = 0

    def onAttach(self):
        super(ShopMarkAction, self).onAttach()
        self.parent().parent().selectionChangedEvent.register(self)

    def onDetach(self):
        self.parent().parent().selectionChangedEvent.unregister(self)
        super(ShopMarkAction, self).onDetach()

    def onSelectionChanged(self, table, selection, *args, **kwargs):
        if len(selection):
            if self.isDisabled:
                self.isDisabled = False
                self["disabled"] = False
        else:
            if not self.isDisabled:
                self["disabled"] = True
                self.isDisabled = True

    def setPayed(self, order):
        NetworkService.request(
            self.parent().parent().module, self.action,
            {"key": order["key"]},
            secure=True,
            successHandler=self.setPayedSucceeded,
            failureHandler=self.setPayedFailed
        )

    def setPayedSucceeded(self, response):
        self.done += 1
        if self.done + self.failed == self.total:
            conf["mainWindow"].log("success", translate(self.txtSuccess, count=self.done))
            NetworkService.notifyChange(self.parent().parent().module)

    def setPayedFailed(self, response):
        conf["mainWindow"].log("error", translate(self.txtFailure))
        self.failed += 1

    def doMarkPayed(self, *args, **kwargs):
        selection = self.parent().parent().getCurrentSelection()
        if not selection:
            return

        self.done = 0
        self.total = len(selection)

        for item in selection:
            self.setPayed(item)

    def onClick(self, sender=None):
        selection = self.parent().parent().getCurrentSelection()
        if not selection:
            return

        Confirm(
            translate(self.txtQuestion, count=len(selection)),
            title=translate("Mark payed"),
            yesCallback=self.doMarkPayed
        )


# ShopMarkPayedAction ----------------------------------------------------------------------------

class ShopMarkPayedAction(ShopMarkAction):
    def __init__(self, *args, **kwargs):
        super(ShopMarkPayedAction, self).__init__(
            "markPayed", "Mark payed",
            cls="order_markpayed",
            txtQuestion="Do you really want to mark {{count}} orders as payed?",
            txtSuccess="{{count}} orders had been successfully set as payed.",
            txtFailure="Failed to mark order payed"
        )

    @staticmethod
    def isSuitableFor(module, handler, actionName):
        return actionName == "markpayed" and handler == "list.order"


actionDelegateSelector.insert(1, ShopMarkPayedAction.isSuitableFor, ShopMarkPayedAction)


# ShopMarkSentAction -----------------------------------------------------------------------------

class ShopMarkSentAction(ShopMarkAction):
    def __init__(self, *args, **kwargs):
        super(ShopMarkSentAction, self).__init__(
            "markSend", "Mark sent",
            cls="order_marksent",
            txtQuestion="Do you really want to mark {{count}} orders as sent?",
            txtSuccess="{{count}} orders had been successfully set as sent.",
            txtFailure="Failed to mark order sent"
        )

    @staticmethod
    def isSuitableFor(module, handler, actionName):
        return actionName == "marksent" and handler == "list.order"


actionDelegateSelector.insert(1, ShopMarkSentAction.isSuitableFor, ShopMarkSentAction)


# ShopMarkCanceledAction -------------------------------------------------------------------------

class ShopMarkCanceledAction(ShopMarkAction):
    def __init__(self, *args, **kwargs):
        super(ShopMarkCanceledAction, self).__init__(
            "markCanceled", "Mark canceled",
            cls="order_markcanceled",
            txtQuestion="Do you really want to cancel {{count}} orders?",
            txtSuccess="{{count}} orders had been successfully canceled.",
            txtFailure="Failed to cancel order"
        )

    @staticmethod
    def isSuitableFor(module, handler, actionName):
        return actionName == "markcanceled" and handler == "list.order"


actionDelegateSelector.insert(1, ShopMarkCanceledAction.isSuitableFor, ShopMarkCanceledAction)
""" Import proctored exam grades from edx """ import csv import argparse from django.contrib.auth.models import User from django.core.management import BaseCommand, CommandError from courses.models import Course from exams.models import ExamRun, ExamAuthorization from exams.constants import EXAM_GRADE_PASS, BACKEND_MITX_ONLINE from grades.models import ProctoredExamGrade from micromasters.utils import now_in_utc from social_django.models import UserSocialAuth class Command(BaseCommand): """Parses a csv with exam grades creating or updating ProctoredExamGrade""" help = "Parses a csv with exam grades and creates ProctoredExamGrade" def METHOD_NAME(self, parser): parser.add_argument('csvfile', type=argparse.FileType('r'), help='') def handle(self, *args, **kwargs): # pylint: disable=unused-argument,too-many-locals csvfile = kwargs.get('csvfile') reader = csv.DictReader(csvfile) grade_count = 0 existing_grades = 0 for row in reader: try: user_social_auth = UserSocialAuth.objects.get(uid=row['username'], provider=BACKEND_MITX_ONLINE) except UserSocialAuth.DoesNotExist: self.stdout.write( self.style.ERROR('Could not find social auth for user for username {}'.format(row['username'])) ) continue user = user_social_auth.user course_id = row['course_id'] try: course = Course.objects.get(id=course_id) except Course.DoesNotExist: raise CommandError( 'Could not find a course with number "{}"'.format(course_id) ) # should pick the latest past exam run now = now_in_utc() exam_run = ExamRun.objects.filter( course=course, date_first_schedulable__lte=now ).order_by('-date_last_schedulable').first() if exam_run is None: raise CommandError( 'There are no eligible exam runs for course "{}"'.format(course.title) ) try: exam_authorization = ExamAuthorization.objects.get(user=user, exam_run=exam_run) except ExamAuthorization.DoesNotExist: self.stdout.write( self.style.ERROR('Could not find authorization for user {} and exam run {}'.format( user.username, exam_run.id )) ) continue if int(row['no_show']): exam_authorization.exam_taken = True exam_authorization.exam_no_show = True exam_authorization.save() else: try: score = float(row['score']) except ValueError: self.stdout.write( self.style.ERROR('Failed to create grade: empty score for user {} and exam run {}'.format( user.username, exam_run.id )) ) continue defaults = { 'passing_score': exam_run.passing_score, 'score': score, 'grade': row['grade'], 'percentage_grade': score / 100.0 if score else 0, 'passed': row['grade'].lower() == EXAM_GRADE_PASS, 'row_data': row, 'exam_date': now_in_utc() } _, created = ProctoredExamGrade.objects.update_or_create( user=user, course=course, exam_run=exam_run, defaults=defaults ) if created: grade_count += 1 exam_authorization.exam_taken = True exam_authorization.save() else: existing_grades += 1 result_messages = [ 'Total exam grades created: {}'.format(grade_count), 'Total number of modified grades: {}'.format(existing_grades) ] self.stdout.write(self.style.SUCCESS('\n'.join(result_messages)))
from boa3_test.tests.boa_test import BoaTest  # needs to be the first import to avoid circular imports

from boa3.internal.exception import CompilerError, CompilerWarning
from boa3.internal.neo.vm.opcode.Opcode import Opcode
from boa3.internal.neo.vm.type.Integer import Integer
from boa3.internal.neo.vm.type.String import String
from boa3.internal.neo3.vm import VMState
from boa3_test.test_drive.testrunner.neo_test_runner import NeoTestRunner


class TestTyping(BoaTest):
    default_folder: str = 'test_sc/typing_test'

    def test_cast_to_int(self):
        expected_output = (
            Opcode.INITSLOT  # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(int, value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x
            + Opcode.RET
        )

        path = self.get_contract_path('CastToInt.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_to_str(self):
        expected_output = (
            Opcode.INITSLOT  # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(str, value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x
            + Opcode.RET
        )

        path = self.get_contract_path('CastToStr.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_to_list(self):
        expected_output = (
            Opcode.INITSLOT  # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(list, value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x
            + Opcode.RET
        )

        path = self.get_contract_path('CastToList.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_to_typed_list(self):
        expected_output = (
            Opcode.INITSLOT  # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(List[int], value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x[0]
            + Opcode.PUSH0
            + Opcode.PICKITEM
            + Opcode.RET
        )

        path = self.get_contract_path('CastToTypedList.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_to_dict(self):
        expected_output = (
            Opcode.INITSLOT
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(dict, value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x
            + Opcode.RET
        )

        path = self.get_contract_path('CastToDict.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_to_typed_dict(self):
        string = String('example').to_bytes()
        expected_output = (
            Opcode.INITSLOT
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(Dict[str, int], value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x['example']
            + Opcode.PUSHDATA1
            + Integer(len(string)).to_byte_array(min_length=1)
            + string
            + Opcode.PICKITEM
            + Opcode.RET
        )

        path = self.get_contract_path('CastToTypedDict.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_mismatched_type(self):
        path = self.get_contract_path('CastMismatchedType.py')
        self.assertCompilerLogs(CompilerError.MismatchedTypes, path)

    def test_cast_to_uint160(self):
        path = self.get_contract_path('CastToUInt160.py')
        self.assertCompilerLogs(CompilerWarning.TypeCasting, path)

        path, _ = self.get_deploy_file_paths(path)
        runner = NeoTestRunner(runner_id=self.method_name())

        invokes = []
        expected_results = []

        value = bytes(range(20))
        invokes.append(runner.call_contract(path, 'Main', value, expected_result_type=bytes))
        expected_results.append(value)

        runner.execute()
        self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)

        for x in range(len(invokes)):
            self.assertEqual(expected_results[x], invokes[x].result)

    def test_cast_to_transaction(self):
        expected_output = (
            Opcode.INITSLOT  # function signature
            + b'\x01'
            + b'\x01'
            + Opcode.LDARG0  # x = cast(Transaction, value)
            + Opcode.STLOC0
            + Opcode.LDLOC0  # return x
            + Opcode.RET
        )

        path = self.get_contract_path('CastToTransaction.py')
        output = self.assertCompilerLogs(CompilerWarning.TypeCasting, path)
        self.assertEqual(expected_output, output)

    def test_cast_inside_if(self):
        path, _ = self.get_deploy_file_paths('CastInsideIf.py')
        runner = NeoTestRunner(runner_id=self.method_name())

        invokes = []
        expected_results = []

        invokes.append(runner.call_contract(path, 'main'))
        expected_results.append('body')

        runner.execute()
        self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)

        for x in range(len(invokes)):
            self.assertEqual(expected_results[x], invokes[x].result)

    def test_cast_persisted_in_scope(self):
        path, _ = self.get_deploy_file_paths('CastPersistedInScope.py')
        runner = NeoTestRunner(runner_id=self.method_name())

        invokes = []
        expected_results = []

        test_address = bytes(20)
        invokes.append(runner.call_contract(path, 'main', test_address, 10, None))
        expected_results.append(None)

        runner.execute()
        self.assertEqual(VMState.HALT, runner.vm_state, msg=runner.error)

        for x in range(len(invokes)):
            self.assertEqual(expected_results[x], invokes[x].result)
# -*- coding: utf-8 -*-

"""
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""

from . import sessions


def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    """

    # By using the 'with' statement we are sure the session is closed, thus we
    # avoid leaving sockets open which can trigger a ResourceWarning in some
    # cases, and look like a memory leak in others.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)


def get(url, params=None, **kwargs):
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    kwargs.setdefault('allow_redirects', True)
    return request('get', url, params=params, **kwargs)


def options(url, **kwargs):
    r"""Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    kwargs.setdefault('allow_redirects', True)
    return request('options', url, **kwargs)


def head(url, **kwargs):
    r"""Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    kwargs.setdefault('allow_redirects', False)
    return request('head', url, **kwargs)


def post(url, data=None, json=None, **kwargs):
    r"""Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('post', url, data=data, json=json, **kwargs)


def put(url, data=None, **kwargs):
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    r"""Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('delete', url, **kwargs)
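
# A short usage sketch of the helpers above, against the same httpbin.org host
# the docstrings use:
import requests

r = requests.get('http://httpbin.org/get', params={'q': 'example'})
print(r.status_code, r.json()['args'])

r = requests.post('http://httpbin.org/post', json={'key': 'value'})
print(r.status_code)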
from ..base.twilltestcase import (
    common,
    ShedTwillTestCase,
)

repo_name = "filtering_0000"
repo_description = "Galaxy's filtering tool"


class TestBasicToolShedFeatures(ShedTwillTestCase):
    """Test installing a basic repository."""

    def test_0000_initiate_users(self):
        """Create necessary user accounts."""
        self.login(email=common.test_user_1_email, username=common.test_user_1_name)
        self.login(email=common.admin_email, username=common.admin_username)
        self.galaxy_login(email=common.admin_email, username=common.admin_username)

    def test_0005_ensure_repositories_and_categories_exist(self):
        """Create the 0000 category and upload the filtering repository to it, if necessary."""
        self.login(email=common.admin_email, username=common.admin_username)
        category = self.create_category(
            name="Test 0000 Basic Repository Features 2",
            description="Test Description 0000 Basic Repository Features 2",
        )
        category = self.create_category(
            name="Test 0000 Basic Repository Features 1",
            description="Test Description 0000 Basic Repository Features 1",
        )
        self.login(email=common.test_user_1_email, username=common.test_user_1_name)
        repository = self.get_or_create_repository(
            name=repo_name,
            description=repo_description,
            long_description="Long description of Galaxy's filtering tool",
            owner=common.test_user_1_name,
            category=category,
        )
        if self.repository_is_new(repository):
            self.upload_file(
                repository,
                filename="filtering/filtering_1.1.0.tar",
                filepath=None,
                valid_tools_only=True,
                uncompress_file=True,
                remove_repo_files_not_in_tar=False,
                commit_message="Uploaded filtering 1.1.0 tarball.",
                strings_displayed=[],
                strings_not_displayed=[],
            )
            self.upload_file(
                repository,
                filename="filtering/filtering_0000.txt",
                filepath=None,
                valid_tools_only=True,
                uncompress_file=False,
                remove_repo_files_not_in_tar=False,
                commit_message="Uploaded readme for 1.1.0",
                strings_displayed=[],
                strings_not_displayed=[],
            )
            self.upload_file(
                repository,
                filename="filtering/filtering_2.2.0.tar",
                filepath=None,
                valid_tools_only=True,
                uncompress_file=True,
                remove_repo_files_not_in_tar=False,
                commit_message="Uploaded filtering 2.2.0 tarball.",
                strings_displayed=[],
                strings_not_displayed=[],
            )
            self.upload_file(
                repository,
                filename="readme.txt",
                filepath=None,
                valid_tools_only=True,
                uncompress_file=False,
                remove_repo_files_not_in_tar=False,
                commit_message="Uploaded readme for 2.2.0",
                strings_displayed=[],
                strings_not_displayed=[],
            )

    def test_0010_browse_tool_sheds(self):
        """Browse the available tool sheds in this Galaxy instance."""
        self.galaxy_login(email=common.admin_email, username=common.admin_username)
        self.browse_tool_shed(
            url=self.url,
            strings_displayed=["Test 0000 Basic Repository Features 1", "Test 0000 Basic Repository Features 2"],
        )

    def test_0015_browse_test_0000_category(self):
        """Browse the category created in test 0000.

        It should contain the filtering_0000 repository also created in that test."""
        category = self.populator.get_category_with_name("Test 0000 Basic Repository Features 1")
        self.browse_category(category, strings_displayed=[repo_name])

    def test_0020_preview_filtering_repository(self):
        """Load the preview page for the filtering_0000 repository in the tool shed."""
        self.preview_repository_in_tool_shed(
            repo_name, common.test_user_1_name, strings_displayed=[repo_name, "Valid tools"]
        )

    def test_0025_install_filtering_repository(self):
        self._install_repository(
            repo_name,
            common.test_user_1_name,
            "Test 0000 Basic Repository Features 1",
            new_tool_panel_section_label="test_1000",
        )
        installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
            repo_name, common.test_user_1_name
        )
        changeset = str(installed_repository.installed_changeset_revision)
        assert self.get_installed_repository_for(common.test_user_1, repo_name, changeset)
        self._assert_has_valid_tool_with_name("Filter1")
        self._assert_repo_has_tool_with_id(installed_repository, "Filter1")

    def test_0030_install_filtering_repository_again(self):
        """Attempt to install the already installed filtering repository."""
        installed_repository = self.test_db_util.get_installed_repository_by_name_owner(
            repo_name, common.test_user_1_name
        )
        # Just make sure the repo is still installed; this used to exercise the
        # monitoring page, but that page has been removed.
        self._install_repository(
            repo_name,
            common.test_user_1_name,
            "Test 0000 Basic Repository Features 1",
        )
        changeset = str(installed_repository.installed_changeset_revision)
        assert self.get_installed_repository_for(common.test_user_1, repo_name, changeset)

    def test_0035_verify_installed_repository_metadata(self):
        """Verify that resetting the metadata on an installed repository does not change the metadata."""
        self.verify_installed_repository_metadata_unchanged(repo_name, common.test_user_1_name)
# Copyright (c) 2023 Mira Geoscience Ltd.
#
# This file is part of geoh5py.
#
# geoh5py is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# geoh5py is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with geoh5py. If not, see <https://www.gnu.org/licenses/>.

# pylint: disable=unused-argument,no-self-use,no-name-in-module
# flake8: noqa

from __future__ import annotations

from dataclasses import dataclass
from enum import IntEnum

from . import shared


class InvalidDataOperation(Exception):
    message: str | None = ""


class BadPrimitiveType(Exception):
    message: str | None = ""


class DataAssociation(IntEnum):
    UNKNOWN = 0
    OBJECT = 1
    CELL = 2
    FACE = 3
    VERTEX = 4


class PrimitiveType(IntEnum):
    UNKNOWN = 0
    INTEGER = 1
    FLOAT = 2
    REFERENCED = 3
    TEXT = 4
    FILENAME = 5
    DATETIME = 6
    BLOB = 7


@dataclass
class Data:
    entity_: shared.Entity | None = None
    association: int | None = None


@dataclass
class DataUnit:
    unit: str | None = ""


@dataclass
class DataType:
    uid: shared.Uuid | None = None
    name: str | None = None
    description: str | None = ""
    units: DataUnit | None = None
    primitive_type: int | None = None


@dataclass
class DataSlab:
    start: int | None = 0
    stride: int | None = 1
    count: int | None = 0
    block: int | None = 1


@dataclass
class ReferencedDataEntry:
    key: int | None = None
    value: str | None = None


@dataclass
class ReferencedValues:
    indices: list[int] | None = None
    entries: list[ReferencedDataEntry] | None = None


@dataclass
class DataQuery:
    name: str | None = None
    object_or_group: shared.Uuid | None = None
    data_type: shared.Uuid | None = None
    primitive_type: int | None = None
    association: int | None = None


@dataclass
class DataTypeQuery:
    name: str | None = None
    primitive_type: int | None = None
    units: DataUnit | None = None


class DataService:
    def get_all(self) -> list[Data]:
        ...

    def find(self, query: DataQuery) -> list[Data]:
        ...

    def get(self, uid: shared.Uuid) -> Data:
        ...

    def get_float_values(self, data: shared.Uuid, slab: DataSlab) -> list[float]:
        ...

    def get_integer_values(self, data: shared.Uuid, slab: DataSlab) -> list[int]:
        ...

    def get_text_values(self, data: shared.Uuid, slab: DataSlab) -> list[str]:
        ...

    def get_referenced_values(self, data: shared.Uuid, slab: DataSlab) -> ReferencedValues:
        ...

    def get_datetime_values(self, data: shared.Uuid, slab: DataSlab) -> list[str]:
        ...

    def get_filename_values(self, data: shared.Uuid, slab: DataSlab) -> list[str]:
        ...

    def get_file_content(self, data: shared.Uuid, file_name: str) -> str:
        ...

    def get_blob_values(self, data: shared.Uuid, slab: DataSlab) -> list[int]:
        ...

    def METHOD_NAME(self, data: shared.Uuid, index: int) -> str:
        ...

    def get_all_types(self) -> list[DataType]:
        ...

    def find_types(self, query: DataTypeQuery) -> list[DataType]:
        ...

    def get_type(self, uid: shared.Uuid) -> DataType:
        ...

    def set_public(self, entities: list[shared.Uuid], is_public: bool) -> None:
        ...

    def set_visible(self, entities: list[shared.Uuid], visible: bool) -> None:
        ...

    def set_allow_delete(self, entities: list[shared.Uuid], allow: bool) -> None:
        ...

    def set_allow_rename(self, entities: list[shared.Uuid], allow: bool) -> None:
        ...

    def rename(self, entities: shared.Uuid, new_name: str) -> None:
        ...
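A brief sketch of how the value objects above compose when driving a concrete DataService implementation; the service methods here are stubs, so the actual call is shown as a comment and `data_uuid` is a made-up placeholder:

# Hypothetical usage of the dataclasses defined above.
query = DataQuery(
    name="resistivity",
    primitive_type=PrimitiveType.FLOAT,   # the field is typed int; IntEnum members are ints
    association=DataAssociation.CELL,
)
slab = DataSlab(start=0, stride=1, count=100, block=1)
# A concrete implementation would then be driven like:
#     values = service.get_float_values(data=data_uuid, slab=slab)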
import gdb
import gdb.xmethod
import gdb.types


def is_wasm_pointer_type(type):
    return type.name is not None and type.name.startswith('__wasm_pointer_t')


def is_wasm_reference_type(type):
    return type.name is not None and type.name.startswith('__wasm_reference_t')


def is_wasm_rvalue_reference_type(type):
    return type.name is not None and type.name.startswith('__wasm_rvalue_reference_t')


def translate_address(ptr, type=None):
    if type is None:
        type = ptr.type.strip_typedefs().template_argument(0)
    frame = gdb.selected_frame()
    linear_memory_base = frame.read_register('rsi')
    native_address = linear_memory_base + ptr["__address"].cast(linear_memory_base.type)
    return native_address.reinterpret_cast(type.pointer())


class WasmPointerWorker_deref(gdb.xmethod.XMethodWorker):
    def __init__(self, class_type):
        gdb.xmethod.XMethodWorker.__init__(self)
        self.class_type = class_type

    def get_arg_types(self):
        return None

    def get_result_type(self, obj):
        return obj.type.template_argument(0)

    def __call__(self, obj):
        return translate_address(obj, self.class_type.template_argument(0)).dereference()


class WasmPointerWorker_arrow(gdb.xmethod.XMethodWorker):
    def __init__(self, class_type):
        gdb.xmethod.XMethodWorker.__init__(self)
        self.class_type = class_type

    def get_arg_types(self):
        return None

    def get_result_type(self, obj):
        return obj.type.template_argument(0)

    def __call__(self, obj):
        return translate_address(obj, self.class_type.template_argument(0))


class WasmPointer_deref(gdb.xmethod.XMethod):
    def __init__(self):
        gdb.xmethod.XMethod.__init__(self, 'operator*')

    def get_worker(self, method_name, class_type):
        if method_name == 'operator*':
            return WasmPointerWorker_deref(class_type)


class WasmPointer_arrow(gdb.xmethod.XMethod):
    def __init__(self):
        gdb.xmethod.XMethod.__init__(self, 'operator->')

    def get_worker(self, method_name, class_type):
        if method_name == 'operator->':
            return WasmPointerWorker_arrow(class_type)


class WasmPointerMatcher(gdb.xmethod.XMethodMatcher):
    def __init__(self):
        gdb.xmethod.XMethodMatcher.__init__(self, "__wasm_pointer_t")
        self.methods = [WasmPointer_deref(), WasmPointer_arrow()]

    def match(self, class_type, method_name):
        class_type = class_type.strip_typedefs()
        if not is_wasm_pointer_type(class_type):
            return None
        result = []
        for method in self.methods:
            if method.enabled:
                worker = method.get_worker(method_name, class_type)
                if worker is not None:
                    result.append(worker)
        return result


class WasmPointerTypePrinterImpl(object):
    def recognize(self, type):
        if is_wasm_pointer_type(type):
            return str(type.template_argument(0).pointer())
        elif is_wasm_reference_type(type):
            return str(type.template_argument(0).reference())
        elif is_wasm_rvalue_reference_type(type):
            return str(type.template_argument(0)) + " &&"


class WasmPointerTypePrinter(gdb.types.TypePrinter):
    def __init__(self):
        gdb.types.TypePrinter.__init__(self, "__wasm_pointer_t")

    def instantiate(self):
        return WasmPointerTypePrinterImpl()


class WasmPointerPrinter(object):
    """Print a wasm pointer"""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        return str(self.val["__address"])


def native_pretty_printer(val):
    if is_wasm_pointer_type(val.type.strip_typedefs()):
        return WasmPointerPrinter(val)
    return None


if hasattr(gdb, "Parameter"):
    class HideNative(gdb.Parameter):
        """Controls whether native frames and functions are visible"""

        def __init__(self, name):
            super(HideNative, self).__init__(name, gdb.COMMAND_BREAKPOINTS, gdb.PARAM_BOOLEAN)
            self.value = True
else:
    class HideNative(object):
        def __init__(self, name):
            self.value = True

hide_native = HideNative("hide-native")


def is_wasm_frame(frame):
    sal = frame.find_sal()
    if not sal.is_valid():
        return False
    return sal.symtab is None or not sal.symtab.objfile.is_file


class WasmFilter:
    def __init__(self):
        self.name = "wasm-only"
        self.priority = 100
        self.enabled = True

    def filter(self, frame_iter):
        if hide_native.value:
            return (x for x in frame_iter if is_wasm_frame(x.inferior_frame()))
        else:
            return frame_iter

wasm_filter = WasmFilter()


def is_wasm_address(pc):
    sal = gdb.current_progspace().find_pc_line(pc)
    if not sal.is_valid():
        return False
    return sal.symtab is None or not sal.symtab.objfile.is_file


def disable_native_breakpoints(breakpoint):
    if hide_native.value and breakpoint.visible:
        for loc in breakpoint.locations:
            if loc.enabled and not is_wasm_address(loc.address):
                loc.enabled = False


def METHOD_NAME(objfile):
    gdb.xmethod.register_xmethod_matcher(objfile, WasmPointerMatcher())
    gdb.types.register_type_printer(objfile, WasmPointerTypePrinter())
    objfile.pretty_printers.append(native_pretty_printer)
    objfile.frame_filters[wasm_filter.name] = wasm_filter


if hasattr(gdb, "Parameter"):
    gdb.events.breakpoint_created.connect(disable_native_breakpoints)
    gdb.events.breakpoint_modified.connect(disable_native_breakpoints)

gdb.execute("set breakpoint pending on")
# coding=utf-8
# Copyright 2023 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for tensorflow_datasets.core.shuffle."""

import collections
import contextlib
import logging
import resource
import tempfile

from absl.testing.absltest import mock
import pytest
from tensorflow_datasets import testing
from tensorflow_datasets.core import shuffle
from tensorflow_datasets.core.utils.lazy_imports_utils import tensorflow as tf

_ITEMS = [
    (1, b'The'),
    (2, b'quick '),
    (3, b'brown'),
    (4, b' fox '),
    (5, b'jumps'),
    ('6', b'over'),
    (b'7', b' the '),
    (8, b'lazy'),
    (9, b' dog.'),
]

_ORDERED_ITEMS_SPLIT1 = [
    b' fox ',
    b'The',
    b'over',
    b'quick ',
    b'lazy',
    b'jumps',
    b' the ',
    b' dog.',
    b'brown',
]

_ORDERED_ITEMS_SPLIT2 = [
    b' dog.',
    b'quick ',
    b'jumps',
    b' fox ',
    b' the ',
    b'brown',
    b'over',
    b'lazy',
    b'The',
]

_SORTED_ITEMS = [
    (1, b'The'),
    (2, b'quick '),
    (3, b'brown'),
    (4, b' fox '),
    (5, b'jumps'),
    (6, b'over'),
    (7, b' the '),
    (8, b'lazy'),
    (9, b' dog.'),
]

_TOTAL_SIZE = sum(len(rec) for rec in _ORDERED_ITEMS_SPLIT1)


@contextlib.contextmanager
def disable_opening_files():
  """Context manager to disable opening new files."""
  soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
  try:
    resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit))
    yield
  finally:
    resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))


@pytest.mark.parametrize(
    [
        'num_keys',
        'num_buckets',
        'max_hkey',
        'expected_non_empty_shards',
        'expected_min_bucket_size',
        'expected_max_bucket_size',
    ],
    [
        (10, 2, 9, 2, 5, 5),
        (10, 3, 9, 3, 3, 4),
        (1024, 10, 1023, 10, 102, 103),
        (10, 2, 99, 1, 0, 10),
    ],
)
def test_get_bucket_number(
    num_keys,
    num_buckets,
    max_hkey,
    expected_non_empty_shards,
    expected_min_bucket_size,
    expected_max_bucket_size,
):
  shards = [
      shuffle.get_bucket_number(hkey=k, num_buckets=num_buckets, max_hkey=max_hkey)
      for k in range(num_keys)
  ]
  # Check shard(x) <= shard(y) if x < y.
  previous_shard = 0
  for shard in shards:
    assert shard >= previous_shard
    previous_shard = shard
  # Check distribution: all shards are used.
  counts = collections.Counter(shards)
  assert len(counts) == expected_non_empty_shards
  for bucket_size in counts.values():
    assert bucket_size >= expected_min_bucket_size
    assert bucket_size <= expected_max_bucket_size


def test_get_bucket_number_large_hkey():
  bucket = shuffle.get_bucket_number(
      hkey=314755909755515592000481005244904880883,
      num_buckets=5,
      max_hkey=314755909755515592000481005244904880883,
  )
  assert bucket == 4


def test_increase_open_files_limit(caplog):
  with disable_opening_files():
    with pytest.raises(OSError) as exc_info:
      tempfile.TemporaryFile()
    assert exc_info.value.strerror == 'Too many open files'
    shuffle._increase_open_files_limit()
    assert caplog.record_tuples == [(
        'absl',
        logging.WARNING,
        (
            'Soft limit for the maximum number of open file descriptors for the'
            ' current process increased from 1 to 1001'
        ),
    )]
    tempfile.TemporaryFile()
  _, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
  resource.setrlimit(resource.RLIMIT_NOFILE, (hard_limit, hard_limit))
  caplog.clear()
  shuffle._increase_open_files_limit()
  assert caplog.record_tuples == [(
      'absl',
      logging.ERROR,
      (
          'Soft and hard limits for the maximum number of open file descriptors'
          ' for the current process are identical.'
      ),
  )]


def test_shuffler_with_limited_open_files(tmp_path, monkeypatch, caplog):
  monkeypatch.setattr(shuffle, 'MAX_MEM_BUFFER_SIZE', 0)
  shuffler = shuffle.Shuffler(tmp_path, 'salt', disable_shuffling=False)
  # trigger Tensorflow imports before disabling opening files
  tf.io.gfile.GFile  # pylint: disable=pointless-statement
  tf.errors.ResourceExhaustedError  # pylint: disable=pointless-statement
  with disable_opening_files():
    shuffler.add(1, b'The')
  assert caplog.record_tuples == [(
      'absl',
      logging.WARNING,
      (
          'Soft limit for the maximum number of open file descriptors for the'
          ' current process increased from 1 to 1001'
      ),
  )]


class ShuffleTest(testing.TestCase):

  def _test_items(self, salt, items, expected_order, disable_shuffling=False):
    shuffler = shuffle.Shuffler(self.get_temp_dir(), salt, disable_shuffling)
    for key, item in items:
      shuffler.add(key, item)
    self.assertEqual(shuffler.size, _TOTAL_SIZE)
    if not shuffler._in_memory:
      # Check size of temporary bucket files
      expected_size = (16 + 8) * len(items) + sum(len(t[1]) for t in items)
      size = 0
      for bucket in shuffler._buckets:
        if not bucket._fobj:
          continue
        bucket._fobj.close()
        with open(bucket._path, 'rb') as f:
          size += len(f.read())
      self.assertEqual(size, expected_size)
    # Check records can be read as expected:
    records = list(ex for _, ex in shuffler)
    self.assertEqual(records, expected_order)

  def test_all_mem(self):
    self._test_items('split1', _ITEMS, _ORDERED_ITEMS_SPLIT1)
    self._test_items('split2', _ITEMS, _ORDERED_ITEMS_SPLIT2)

  @mock.patch.object(shuffle, 'MAX_MEM_BUFFER_SIZE', 0)
  def test_disk(self):
    self._test_items('split1', _ITEMS, _ORDERED_ITEMS_SPLIT1)
    self._test_items('split2', _ITEMS, _ORDERED_ITEMS_SPLIT2)

  def test_sorted_by_key(self):
    self._test_items(
        'split1',
        _SORTED_ITEMS,
        [value for _, value in _SORTED_ITEMS],
        disable_shuffling=True,
    )

  def test_nonbytes(self):
    shuffler = shuffle.Shuffler(self.get_temp_dir(), 'split1')
    with self.assertRaisesWithPredicateMatch(AssertionError, 'Only bytes'):
      shuffler.add(1, 'a')
    with self.assertRaisesWithPredicateMatch(AssertionError, 'Only bytes'):
      shuffler.add(1, 123)

  def test_duplicate_key(self):
    shuffler = shuffle.Shuffler(self.get_temp_dir(), 'split1')
    shuffler.add(1, b'a')
    shuffler.add(2, b'b')
    shuffler.add(1, b'c')
    iterator = iter(shuffler)
    self.assertEqual(
        next(iterator), (86269847664267119453139349052967691808, b'a')
    )
    with self.assertRaises(shuffle.DuplicatedKeysError):
      next(iterator)


if __name__ == '__main__':
  testing.test_main()
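A minimal round-trip with the Shuffler exercised by these tests, mirroring the calls made in _test_items; the working directory is a made-up path:

from tensorflow_datasets.core import shuffle

shuffler = shuffle.Shuffler('/tmp/shuffle_workdir', 'split1')
shuffler.add(1, b'first record')   # keys are hashed with the salt
shuffler.add(2, b'second record')  # values must be bytes
for hkey, record in shuffler:      # iteration yields (hashed key, record) in shuffled order
    print(hkey, record)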
""" SoftLayer.tests.managers.account_tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ """ from unittest import mock as mock from SoftLayer.managers.account import AccountManager as AccountManager from SoftLayer import SoftLayerAPIError from SoftLayer import testing class AccountManagerTests(testing.TestCase): def set_up(self): self.manager = AccountManager(self.client) self.SLNOE = 'SoftLayer_Notification_Occurrence_Event' def test_get_summary(self): self.manager.get_summary() self.assert_called_with('SoftLayer_Account', 'getObject') def test_get_planned_upcoming_events(self): self.manager.get_upcoming_events("PLANNED") self.assert_called_with(self.SLNOE, 'getAllObjects') def test_get_unplanned_upcoming_events(self): self.manager.get_upcoming_events("UNPLANNED_INCIDENT") self.assert_called_with(self.SLNOE, 'getAllObjects') def test_get_announcement_upcoming_events(self): self.manager.get_upcoming_events("ANNOUNCEMENT") self.assert_called_with(self.SLNOE, 'getAllObjects') def test_add_planned_event_filter(self): event_type = 'PLANNED' _filter = { 'notificationOccurrenceEventType': { 'keyName': { 'operation': event_type } } } self.manager.add_event_filter(_filter, event_type) def test_add_unplanned_event_filter(self): event_type = 'UNPLANNED_INCIDENT' _filter = { 'notificationOccurrenceEventType': { 'keyName': { 'operation': event_type } } } self.manager.add_event_filter(_filter, event_type) def test_add_announcement_event_filter(self): event_type = 'ANNOUNCEMENT' _filter = { 'notificationOccurrenceEventType': { 'keyName': { 'operation': event_type } } } self.manager.add_event_filter(_filter, event_type) def test_ack_event(self): self.manager.ack_event(12345) self.assert_called_with(self.SLNOE, 'acknowledgeNotification', identifier=12345) def test_get_event(self): self.manager.get_event(12345) self.assert_called_with(self.SLNOE, 'getObject', identifier=12345) def test_get_invoices(self): self.manager.get_invoices() self.assert_called_with('SoftLayer_Account', 'getInvoices') def test_get_invoices_closed(self): self.manager.get_invoices(closed=True) _filter = { 'invoices': { 'createDate': { 'operation': 'orderBy', 'options': [{ 'name': 'sort', 'value': ['DESC'] }] } } } self.assert_called_with('SoftLayer_Account', 'getInvoices', filter=_filter) def test_get_billing_items(self): self.manager.get_billing_items(12345) self.assert_called_with('SoftLayer_Billing_Invoice', 'getInvoiceTopLevelItems') def test_get_account_billing_items(self): self.manager.get_account_billing_items() object_filter = { "allTopLevelBillingItems": { "cancellationDate": { "operation": "is null" }, "id": { "operation": "orderBy", "options": [ { "name": "sort", "value": ["ASC"] } ] } } } self.assert_called_with('SoftLayer_Account', 'getAllTopLevelBillingItems', offset=0, limit=100, filter=object_filter) self.manager.get_account_billing_items(mask="id") self.assert_called_with('SoftLayer_Account', 'getAllTopLevelBillingItems', mask="mask[id]") def METHOD_NAME(self): self.manager.get_billing_item(12345) self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=12345) self.manager.get_billing_item(12345, mask="id") self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=12345, mask="mask[id]") def test_cancel_item(self): self.manager.cancel_item(12345) reason = "No longer needed" note = "Cancelled by testAccount with the SLCLI" self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem', args=(False, True, reason, note), identifier=12345) reason = "TEST" note = "note test" 
self.manager.cancel_item(12345, reason, note) self.assert_called_with('SoftLayer_Billing_Item', 'cancelItem', args=(False, True, reason, note), identifier=12345) def test_get_billing_item_from_invoice(self): self.manager.get_billing_item_from_invoice(12345) self.assert_called_with('SoftLayer_Billing_Invoice_Item', 'getBillingItem', identifier=12345) def test_get_item_details_with_billing_item_id(self): self.manager.get_item_detail(12345) self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=12345) def test_get_item_details_with_invoice_item_id(self): mock = self.set_mock('SoftLayer_Billing_Item', 'getObject') mock.side_effect = SoftLayerAPIError(404, "Unable to find object with id of '123456'.") self.manager.get_item_detail(123456) self.assert_called_with('SoftLayer_Billing_Item', 'getObject', identifier=123456) self.assert_called_with('SoftLayer_Billing_Invoice_Item', 'getBillingItem', identifier=123456) def test_get_routers(self): self.manager.get_routers() self.assert_called_with("SoftLayer_Account", "getRouters") def test_get_active_account_licenses(self): self.manager.get_active_account_licenses() self.assert_called_with("SoftLayer_Account", "getActiveAccountLicenses") def test_get_active_virtual_licenses(self): self.manager.get_active_virtual_licenses() self.assert_called_with("SoftLayer_Account", "getActiveVirtualLicenses") def test_get_routers_with_datacenter(self): self.manager.get_routers(location='dal13') object_filter = {'routers': {'topLevelLocation': {'name': {'operation': 'dal13'}}}} self.assert_called_with("SoftLayer_Account", "getRouters", filter=object_filter) def test_get_bandwidth_pools(self): self.manager.get_bandwidth_pools() self.assert_called_with('SoftLayer_Account', 'getBandwidthAllotments', mask=mock.ANY) def test_get_bandwidth_pool_counts(self): total = self.manager.get_bandwidth_pool_counts(1234) self.assert_called_with('SoftLayer_Network_Bandwidth_Version1_Allotment', 'getObject', identifier=1234) self.assertEqual(total, 2) def test_get_provisioning_scripts(self): self.manager.get_provisioning_scripts() self.assert_called_with("SoftLayer_Account", "getPostProvisioningHooks") def test_create_provisioning_scripts(self): self.manager.create_provisioning('testslcli', 'http://slclitest.com') self.assert_called_with('SoftLayer_Provisioning_Hook', 'createObject') def test_delete_provisioning_scripts(self): self.manager.delete_provisioning(123456) self.assert_called_with("SoftLayer_Provisioning_Hook", "deleteObject") def test_get_upgrades_orders(self): self.manager.get_account_upgrade_orders() self.assert_called_with("SoftLayer_Account", "getUpgradeRequests")
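For context, a sketch of how the manager under test is used interactively; this assumes SoftLayer credentials are configured in the environment (e.g. via `slcli config setup`):

import SoftLayer
from SoftLayer.managers.account import AccountManager

client = SoftLayer.create_client_from_env()
manager = AccountManager(client)
print(manager.get_summary())
for event in manager.get_upcoming_events("PLANNED"):
    print(event)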
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import array
import base64
import os
import os.path
import re
import json
import string
import subprocess
import sys
import imp
import time
import shlex
import traceback
import httplib
import xml.parsers.expat
import datetime

from patch import *
from os.path import join
from Common import CommonVariables
from Utils import HandlerUtil
from urlparse import urlparse
from RDMALogger import RDMALogger
from CronUtil import *
from SecondStageMarkConfig import SecondStageMarkConfig


def main():
    global logger
    global hutil
    global MyPatching
    HandlerUtil.LoggerInit('/var/log/waagent.log', '/dev/stdout')
    HandlerUtil.waagent.Log("%s started to handle." % (CommonVariables.extension_name))
    hutil = HandlerUtil.HandlerUtility(HandlerUtil.waagent.Log, HandlerUtil.waagent.Error,
                                       CommonVariables.extension_name)
    logger = RDMALogger(hutil)
    MyPatching = GetMyPatching(logger)
    hutil.patching = MyPatching
    for a in sys.argv[1:]:
        if re.match("^([-/]*)(disable)", a):
            disable()
        elif re.match("^([-/]*)(uninstall)", a):
            uninstall()
        elif re.match("^([-/]*)(install)", a):
            install()
        elif re.match("^([-/]*)(enable)", a):
            enable()
        elif re.match("^([-/]*)(update)", a):
            update()
        elif re.match("^([-/]*)(rdmaupdate)", a):
            rdmaupdate()
        elif re.match("^([-/]*)(chkrdma)", a):
            chkrdma()


def chkrdma():
    hutil.do_parse_context('Executing')
    check_result = MyPatching.check_rdma()
    if check_result == CommonVariables.UpToDate:
        hutil.do_exit(0, 'Enable', 'success', '0', 'RDMA Driver up to date.')
    if check_result == CommonVariables.OutOfDate:
        hutil.do_exit(0, 'Enable', 'success', '0', 'RDMA Driver out of date.')
    if check_result == CommonVariables.DriverVersionNotFound:
        hutil.do_exit(0, 'Enable', 'success', '0', 'RDMA Driver not found.')
    if check_result == CommonVariables.Unknown:
        hutil.do_exit(0, 'Enable', 'success', '0', 'RDMA version not found.')


def rdmaupdate():
    hutil.do_parse_context('Executing')
    try:
        MyPatching.rdmaupdate()
        hutil.do_status_report('Enable', 'success', '0', 'Enable Succeeded')
        MyPatching.reboot_machine()
    except Exception as e:
        logger.log("Failed to update with error: %s, stack trace: %s" % (str(e), traceback.format_exc()))
        hutil.do_exit(0, 'Enable', 'success', '0', 'enable failed, please take a look at the extension log.')


def start_daemon():
    args = [os.path.join(os.getcwd(), __file__), "-rdmaupdate"]
    logger.log("start_daemon with args:" + str(args))
    devnull = open(os.devnull, 'w')
    child = subprocess.Popen(args, stdout=devnull, stderr=devnull)


def enable():
    # do it one time when enabling.
    # config the cron job
    hutil.do_parse_context('Enable')
    secondStageMarkConfig = SecondStageMarkConfig()
    if secondStageMarkConfig.IsMarked():
        secondStageMarkConfig.ClearIt()
        start_daemon()
    else:
        hutil.exit_if_enabled()
        cronUtil = CronUtil(logger)
        cronUtil.check_update_cron_config()
        cronUtil.restart_cron()
        start_daemon()


def install():
    hutil.do_parse_context('Install')
    hutil.do_exit(0, 'Install', 'success', '0', 'Install Succeeded')


def uninstall():
    hutil.do_parse_context('Uninstall')
    hutil.do_exit(0, 'Uninstall', 'success', '0', 'Uninstall succeeded')


def disable():
    hutil.do_parse_context('Disable')
    hutil.do_exit(0, 'Disable', 'success', '0', 'Disable Succeeded')


def update():
    hutil.do_parse_context('Update')
    hutil.do_exit(0, 'Update', 'success', '0', 'Update Succeeded')


if __name__ == '__main__':
    main()
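The handler above is normally driven by the Azure guest agent, which passes a single verb on the command line; a manual run would look like the following sketch (the script filename is hypothetical, and main() needs the waagent environment to succeed):

import sys

# Equivalent to running `python rdma_handler.py -chkrdma` on the VM:
sys.argv = ["rdma_handler.py", "-chkrdma"]
main()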
""" SoftLayer.tests.CLI.core_tests ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :license: MIT, see LICENSE for more details. """ import io import logging import click from unittest import mock as mock from requests.models import Response import SoftLayer from SoftLayer.CLI import core from SoftLayer.CLI import environment from SoftLayer import testing class CoreTests(testing.TestCase): def test_load_all(self): for path, cmd in recursive_subcommand_loader(core.cli, current_path='root'): try: cmd.main(args=['--help']) except SystemExit as ex: if ex.code != 0: self.fail("Non-zero exit code for command: %s" % path) def METHOD_NAME(self): with mock.patch('logging.getLogger') as log_mock: result = self.run_command(['-vvv', 'vs', 'list']) self.assert_no_fail(result) log_mock().addHandler.assert_called_with(mock.ANY) log_mock().setLevel.assert_called_with(logging.DEBUG) def test_build_client(self): env = environment.Environment() result = self.run_command(['vs', 'list'], env=env) self.assert_no_fail(result) self.assertIsNotNone(env.client) def test_diagnostics(self): result = self.run_command(['-v', 'vs', 'list']) self.assert_no_fail(result) self.assertIn('SoftLayer_Account::getVirtualGuests', result.output) self.assertIn('"execution_time"', result.output) self.assertIn('"api_calls"', result.output) self.assertIn('"version"', result.output) self.assertIn('"python_version"', result.output) self.assertIn('"library_location"', result.output) @mock.patch('requests.get') def test_get_latest_version(self, request_get): response = Response() response.status_code = 200 response.json = mock.MagicMock(return_value={"info": {"version": "1.1.1"}}) request_get.return_value = response version = core.get_latest_version() self.assertIn('1.1.1', version) @mock.patch('requests.get') def test_unable_get_latest_version(self, request_get): request_get.side_effect = Exception version = core.get_latest_version() self.assertIn('Unable', version) @mock.patch('SoftLayer.CLI.core.get_latest_version') def test_get_version_message(self, get_latest_version_mock): get_latest_version_mock.return_value = '1.1.1' env = environment.Environment() result = self.run_command(['--version'], env=env) self.assert_no_fail(result) class CoreMainTests(testing.TestCase): @mock.patch('SoftLayer.CLI.core.cli.main') @mock.patch('sys.stdout', new_callable=io.StringIO) def test_unexpected_error(self, stdoutmock, climock): climock.side_effect = AttributeError('Attribute foo does not exist') self.assertRaises(SystemExit, core.main) self.assertIn("Feel free to report this error as it is likely a bug", stdoutmock.getvalue()) self.assertIn("Traceback (most recent call last)", stdoutmock.getvalue()) self.assertIn("AttributeError: Attribute foo does not exist", stdoutmock.getvalue()) @mock.patch('SoftLayer.CLI.core.cli.main') @mock.patch('sys.stdout', new_callable=io.StringIO) def test_sl_error(self, stdoutmock, climock): ex = SoftLayer.SoftLayerAPIError('SoftLayer_Exception', 'Not found') climock.side_effect = ex self.assertRaises(SystemExit, core.main) self.assertIn("SoftLayerAPIError(SoftLayer_Exception): Not found", stdoutmock.getvalue()) @mock.patch('SoftLayer.CLI.core.cli.main') @mock.patch('sys.stdout', new_callable=io.StringIO) def test_auth_error(self, stdoutmock, climock): ex = SoftLayer.SoftLayerAPIError('SoftLayer_Exception', 'Invalid API token.') climock.side_effect = ex self.assertRaises(SystemExit, core.main) self.assertIn("Authentication Failed:", stdoutmock.getvalue()) self.assertIn("use 'slcli config setup'", stdoutmock.getvalue()) def 
recursive_subcommand_loader(root, current_path=''): """Recursively load and list every command.""" if getattr(root, 'list_commands', None) is None: return ctx = click.Context(root) for command in root.list_commands(ctx): new_path = '%s:%s' % (current_path, command) logging.info("loading %s", new_path) new_root = root.get_command(ctx, command) if new_root is None: raise Exception('Could not load command: %s' % command) for path, cmd in recursive_subcommand_loader(new_root, current_path=new_path): yield path, cmd yield current_path, new_root
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
#   Copyright 2021-2023 Valory AG
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
# ------------------------------------------------------------------------------
"""Watcher script and wrapper container for agent."""

import os
import shutil
import signal
import subprocess  # nosec
import sys
import time
from pathlib import Path
from typing import Optional

import requests
from watchdog.events import EVENT_TYPE_CLOSED, FileSystemEvent, FileSystemEventHandler
from watchdog.observers import Observer


ID = os.environ.get("ID")
ROOT = "/home/ubuntu"
AGENT_DIR = ROOT + "/agent"
PACKAGES_PATH = "/home/ubuntu/packages"
OPEN_AEA_PATH = "/open-aea"
BASE_START_FILE = "/home/ubuntu/start.sh"
TENDERMINT_COM_URL = os.environ.get("TENDERMINT_COM_URL", f"http://node{ID}:8080")


def write(line: str) -> None:
    """Write to console."""
    sys.stdout.write(line)
    sys.stdout.write("\n")
    sys.stdout.flush()


def call_vote() -> None:
    """
    Call vote.

    Since there's a lot of resource sharing between docker containers, one of the
    environments can fall back during `base_setup`; to make sure there's no error
    caused by one of the agents left behind, this method will help.
    """
    write("Calling vote.")
    with open(f"/logs/{ID}.vote", "w+") as fp:
        fp.write(str(ID))


class AEARunner:
    """AEA Runner."""

    process: Optional[subprocess.Popen]  # nosec

    def __init__(self) -> None:
        """Initialize runner."""
        self.process = None

    @staticmethod
    def restart_tendermint() -> None:
        """Restart respective tendermint node."""
        write("Restarting Tendermint.")
        try:
            response = requests.get(TENDERMINT_COM_URL + "/hard_reset")
            if response.status_code != 200:
                write("Tendermint node not yet available.")
        except requests.exceptions.ConnectionError:
            write("Tendermint node not yet available.")

    def start(self) -> None:
        """Start AEA process."""
        if self.process is not None:
            return
        write("Starting Agent.")
        os.chdir(ROOT)
        if Path(AGENT_DIR).exists():
            shutil.rmtree(AGENT_DIR)
        self.process = subprocess.Popen(  # nosec
            ["/bin/bash", BASE_START_FILE], preexec_fn=os.setsid
        )

    def stop(self) -> None:
        """Stop AEA process."""
        if self.process is None:
            return
        write("Stopping Agent.")
        os.killpg(
            os.getpgid(self.process.pid), signal.SIGTERM
        )  # kills process instantly compared to process.terminate
        self.process = None


class EventHandler(FileSystemEventHandler):
    """Handle file updates."""

    def __init__(self, aea_runner: AEARunner, fingerprint_on_restart: bool = True) -> None:
        """Initialize object."""
        super().__init__()
        self.aea = aea_runner
        self.fingerprint_on_restart = fingerprint_on_restart

    @staticmethod
    def fingerprint_item(src_path: str) -> None:
        """Fingerprint items."""
        cwd = os.curdir
        *_path, vendor, item_type, item_name, _ = src_path.split(os.path.sep)
        vendor_dir_str = os.path.sep.join([*_path, vendor])
        os.chdir(vendor_dir_str)
        subprocess.call(  # nosec
            [
                "python3",
                "-m",
                "aea.cli",
                "fingerprint",
                item_type[:-1],
                f"{vendor}/{item_name}",
            ]
        )
        os.chdir(cwd)

    @staticmethod
    def clean_up() -> None:
        """Clean up from previous run."""
        os.chdir(ROOT)
        if Path(AGENT_DIR).exists():
            write("removing aea dir.")
            shutil.rmtree(AGENT_DIR)

    def on_any_event(self, event: FileSystemEvent) -> None:
        """This method reloads the agent when a change is detected in a *.py file."""
        if (
            not event.is_directory
            and event.event_type == EVENT_TYPE_CLOSED
            and event.src_path.endswith(".py")
        ):
            write("Change detected.")
            self.clean_up()
            if self.fingerprint_on_restart:
                self.fingerprint_item(event.src_path)
            self.aea.stop()
            self.aea.restart_tendermint()
            self.aea.start()


if __name__ == "__main__":
    aea_runner = AEARunner()
    package_observer = Observer()
    package_observer.schedule(
        EventHandler(aea_runner=aea_runner), PACKAGES_PATH, recursive=True
    )
    open_aea_observer = Observer()
    open_aea_observer.schedule(
        EventHandler(aea_runner=aea_runner, fingerprint_on_restart=False),
        OPEN_AEA_PATH,
        recursive=True,
    )
    try:
        aea_runner.start()
        package_observer.start()
        open_aea_observer.start()
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        aea_runner.stop()
        package_observer.stop()
        open_aea_observer.stop()
        open_aea_observer.join()
        package_observer.join()
#!/usr/bin/python

'IPython support for fonttools'

__all__ = ['displayGlyphs', 'loadFont', 'displayText', 'displayRaw']

from fontTools import ttLib
from fontTools.pens.basePen import BasePen
from fontTools.misc import arrayTools
from IPython.display import SVG, HTML
from defcon import Font
from ufo2ft import compileTTF


class SVGPen(BasePen):

    def __init__(self, glyphSet, scale=1.0):
        super(SVGPen, self).__init__(glyphSet)
        self.__commands = []
        self.__scale = scale

    def __str__(self):
        return " ".join(self.__commands)

    def scale(self, pt):
        return ((pt[0] or 0) * self.__scale, (pt[1] or 0) * self.__scale)

    def _moveTo(self, pt):
        self.__commands.append("M {0[0]} {0[1]}".format(self.scale(pt)))

    def _lineTo(self, pt):
        self.__commands.append("L {0[0]} {0[1]}".format(self.scale(pt)))

    def _curveToOne(self, pt1, pt2, pt3):
        self.__commands.append("C {0[0]} {0[1]} {1[0]} {1[1]} {2[0]} {2[1]}".format(
            self.scale(pt1), self.scale(pt2), self.scale(pt3)))

    def _closePath(self):
        self.__commands.append("Z")

    def clear(self):
        self.__commands = []


def _svgheader():
    return '''<?xml version="1.0"?>
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1">
'''


def _bbox(f, gnames, points, scale=1):
    gset = f.glyphSet
    bbox = (0, 0, 0, 0)
    for i, gname in enumerate(gnames):
        if hasattr(points, '__len__') and i == len(points):
            points.append((bbox[2] / scale, 0))
        pt = points[i] if i < len(points) else (0, 0)
        g = gset[gname]._glyph
        if g is None or not hasattr(g, 'xMin'):
            gbox = (0, 0, 0, 0)
        else:
            gbox = (g.xMin * scale, g.yMin * scale, g.xMax * scale, g.yMax * scale)
        bbox = arrayTools.unionRect(bbox, arrayTools.offsetRect(gbox, pt[0] * scale, pt[1] * scale))
    return bbox

glyphsetcount = 0


def _defglyphs(f, gnames, scale=1):
    global glyphsetcount
    glyphsetcount += 1
    gset = f.glyphSet
    p = SVGPen(gset, scale)
    res = "<defs><g>\n"
    for gname in sorted(set(gnames)):
        res += '<symbol overflow="visible" id="{}_{}">\n'.format(gname, glyphsetcount)
        g = gset[gname]
        p.clear()
        g.draw(p)
        res += '<path style="stroke:none;" d="' + str(p) + '"/>\n</symbol>\n'
    res += "</g></defs>\n"
    return res


def loadFont(fname):
    if fname.lower().endswith(".ufo"):
        ufo = Font(fname)
        f = compileTTF(ufo)
    else:
        f = ttLib.TTFont(fname)
    return f


def displayGlyphs(f, gnames, points=None, scale=None):
    if not hasattr(gnames, '__len__') or isinstance(gnames, str):
        gnames = [gnames]
    if not hasattr(points, '__len__'):
        points = []
    if not hasattr(f, 'glyphSet'):
        f.glyphSet = f.getGlyphSet()
    res = _svgheader()
    if points is None:
        points = []
    bbox = _bbox(f, gnames, points, scale or 1)
    maxh = 100.
    height = bbox[3] - (bbox[1] if bbox[1] < 0 else 0)
    if scale is None and height > maxh:
        scale = maxh / height
        bbox = [x * scale for x in bbox]
    res += _defglyphs(f, gnames, scale)
    res += '<g id="surface1" transform="matrix(1,0,0,-1,{},{})">\n'.format(-bbox[0], bbox[3])
    res += '  <rect x="{}" y="{}" width="{}" height="{}" style="fill:white;stroke:none"/>\n'.format(
        bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3])
    res += '  <g style="fill:black">\n'
    for i, gname in enumerate(gnames):
        pt = points[i] if i < len(points) else (0, 0)
        res += '    <use xlink:href="#{0}_{3}" x="{1}" y="{2}"/>\n'.format(
            gname, pt[0] * scale, pt[1] * scale, glyphsetcount)
    res += '  </g></g>\n</svg>\n'
    return SVG(data=res)
    # return res


def displayText(f, text, features=[], lang=None, dir="", script="", shapers="", size=0):
    import harfbuzz
    glyphs = harfbuzz.shape_text(f, text, features, lang, dir, script, shapers)
    gnames = []
    points = []
    x = 0
    y = 0
    for g in glyphs:
        gnames.append(f.getGlyphName(g.gid))
        points.append((x + g.offset[0], y + g.offset[1]))
        x += g.advance[0]
        y += g.advance[1]
    if size == 0:
        scale = None
    else:
        upem = f['head'].unitsPerEm
        scale = 4. * size / (upem * 3.)
    return displayGlyphs(f, gnames, points, scale=scale)


def displayRaw(text):
    # res = "<html><body>"+text.encode('utf-8')+"</body></html>"
    res = u"<html><body><p>" + text + u"</p></body></html>"
    return HTML(data=res)
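A notebook-style usage sketch for the helpers above; the font path is a placeholder, and displayText additionally requires the harfbuzz module imported inside it:

f = loadFont("MyFont.ttf")           # also accepts a .ufo source, compiled via ufo2ft
displayGlyphs(f, ["a", "b", "c"])    # renders the glyphs as inline SVG
displayText(f, "hello", size=24)     # shapes the string with harfbuzz, then renders it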
import os
import threading
import time
from typing import Optional

import psutil

from galaxy import (
    job_metrics,
    model,
)
from galaxy.app_unittest_utils.tools_support import UsesTools
from galaxy.jobs.runners import local
from galaxy.util import bunch
from galaxy.util.unittest import TestCase


class TestLocalJobRunner(TestCase, UsesTools):
    def setUp(self):
        self.setup_app()
        self._init_tool()
        self.app.job_metrics = job_metrics.JobMetrics()
        self.job_wrapper = MockJobWrapper(self.app, self.test_directory, self.tool)

    def tearDown(self):
        self.tear_down_app()

    def test_run(self):
        self.job_wrapper.command_line = "echo HelloWorld"
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert self.job_wrapper.stdout.strip() == "HelloWorld"

    def test_galaxy_lib_on_path(self):
        self.job_wrapper.command_line = '''python -c "import galaxy.util"'''
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert self.job_wrapper.exit_code == 0

    def test_default_slots(self):
        self.job_wrapper.command_line = """echo $GALAXY_SLOTS"""
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert self.job_wrapper.stdout.strip() == "1"

    def test_slots_override(self):
        # Set local_slots in job destination to specify slots for
        # local job runner.
        self.job_wrapper.job_destination.params["local_slots"] = 3
        self.job_wrapper.command_line = """echo $GALAXY_SLOTS"""
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert self.job_wrapper.stdout.strip() == "3"

    def test_exit_code(self):
        self.job_wrapper.command_line = '''sh -c "exit 4"'''
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert self.job_wrapper.exit_code == 4

    def test_metadata_gets_set(self):
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert os.path.exists(self.job_wrapper.mock_metadata_path)

    def test_metadata_gets_set_if_embedded(self):
        self.job_wrapper.job_destination.params["embed_metadata_in_job"] = "True"
        # Kill off cruft for _handle_metadata_externally and make sure job still works...
        self.job_wrapper.external_output_metadata = None
        self.app.datatypes_registry.set_external_metadata_tool = None
        runner = local.LocalJobRunner(self.app, 1)
        runner.queue_job(self.job_wrapper)
        assert os.path.exists(self.job_wrapper.mock_metadata_path)

    def test_stopping_job(self):
        self.job_wrapper.command_line = '''python -c "import time; time.sleep(15)"'''
        runner = local.LocalJobRunner(self.app, 1)

        def queue():
            runner.queue_job(self.job_wrapper)

        t = threading.Thread(target=queue)
        t.start()
        external_id = self.job_wrapper.wait_for_external_id()
        assert psutil.pid_exists(external_id)
        runner.stop_job(self.job_wrapper)
        t.join(1)
        assert not psutil.pid_exists(external_id)

    def test_shutdown_no_jobs(self):
        self.app.config.monitor_thread_join_timeout = 5
        runner = local.LocalJobRunner(self.app, 1)
        runner.start()
        runner.shutdown()

    def test_stopping_job_at_shutdown(self):
        self.job_wrapper.command_line = '''python -c "import time; time.sleep(15)"'''
        self.app.model.session = bunch.Bunch(add=lambda x: None, flush=lambda: None)
        runner = local.LocalJobRunner(self.app, 1)
        runner.start()
        self.app.config.monitor_thread_join_timeout = 15

        def queue():
            runner.queue_job(self.job_wrapper)

        t = threading.Thread(target=queue)
        t.start()
        external_id = self.job_wrapper.wait_for_external_id()
        assert psutil.pid_exists(external_id)
        runner.shutdown()
        t.join(1)
        assert not psutil.pid_exists(external_id)
        assert "job terminated by Galaxy shutdown" in self.job_wrapper.fail_message


class MockJobWrapper:
    def __init__(self, app, test_directory, tool):
        working_directory = os.path.join(test_directory, "workdir")
        tool_working_directory = os.path.join(working_directory, "working")
        os.makedirs(tool_working_directory)
        self.app = app
        self.tool = tool
        self.requires_containerization = False
        self.state = model.Job.states.QUEUED
        self.command_line = "echo HelloWorld"
        self.environment_variables = []
        self.commands_in_new_shell = False
        self.prepare_called = False
        self.dependency_shell_commands = None
        self.working_directory = working_directory
        self.tool_working_directory = tool_working_directory
        self.requires_setting_metadata = True
        self.job_destination = bunch.Bunch(id="default", params={})
        self.galaxy_lib_dir = os.path.abspath("lib")
        self.job = model.Job()
        self.job_id = 1
        self.job.id = 1
        self.output_paths = ["/tmp/output1.dat"]
        self.mock_metadata_path = os.path.abspath(os.path.join(test_directory, "METADATA_SET"))
        self.metadata_command = f"touch {self.mock_metadata_path}"
        self.galaxy_virtual_env = None
        self.shell = "/bin/bash"
        self.cleanup_job = "never"
        self.tmp_dir_creation_statement = ""
        self.use_metadata_binary = False
        self.guest_ports = []
        self.metadata_strategy = "directory"
        self.remote_command_line = False

        # Cruft for setting metadata externally, axe at some point.
        self.external_output_metadata: Optional[bunch.Bunch] = bunch.Bunch()
        self.app.datatypes_registry.set_external_metadata_tool = bunch.Bunch(
            build_dependency_shell_commands=lambda: []
        )

    def check_tool_output(*args, **kwds):
        return "ok"

    def wait_for_external_id(self):
        """Test method for waiting until an external id has been registered."""
        external_id = None
        for _ in range(50):
            external_id = self.job.job_runner_external_id
            if external_id:
                break
            time.sleep(0.1)
        return external_id

    def prepare(self):
        self.prepare_called = True

    def set_external_id(self, external_id, **kwd):
        self.job.job_runner_external_id = external_id

    def get_command_line(self):
        return self.command_line

    def container_monitor_command(self, *args, **kwds):
        return None

    def get_id_tag(self):
        return "1"

    def get_state(self):
        return self.state

    def change_state(self, state, job=None):
        self.state = state

    @property
    def job_io(self):
        return bunch.Bunch(
            get_output_fnames=lambda: [], check_job_script_integrity=False, version_path="/tmp/version_path"
        )

    def get_job(self):
        return self.job

    def setup_external_metadata(self, **kwds):
        return self.metadata_command

    def get_env_setup_clause(self):
        return ""

    def has_limits(self):
        return False

    def fail(
        self, message, exception=False, tool_stdout="", tool_stderr="", exit_code=None, job_stdout=None, job_stderr=None
    ):
        self.fail_message = message
        self.fail_exception = exception

    def finish(self, stdout, stderr, exit_code, **kwds):
        self.stdout = stdout
        self.stderr = stderr
        self.exit_code = exit_code

    def tmp_directory(self):
        return None

    def home_directory(self):
        return None

    def reclaim_ownership(self):
        pass

    @property
    def is_cwl_job(self):
        return False
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkconfig.endpoint import endpoint_data


class CreateAggregateConfigDeliveryChannelRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'Config', '2020-09-07', 'CreateAggregateConfigDeliveryChannel')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_NonCompliantNotification(self):  # Boolean
        return self.get_query_params().get('NonCompliantNotification')

    def set_NonCompliantNotification(self, NonCompliantNotification):  # Boolean
        self.add_query_param('NonCompliantNotification', NonCompliantNotification)

    def get_ClientToken(self):  # String
        return self.get_query_params().get('ClientToken')

    def set_ClientToken(self, ClientToken):  # String
        self.add_query_param('ClientToken', ClientToken)

    def get_ConfigurationSnapshot(self):  # Boolean
        return self.get_query_params().get('ConfigurationSnapshot')

    def set_ConfigurationSnapshot(self, ConfigurationSnapshot):  # Boolean
        self.add_query_param('ConfigurationSnapshot', ConfigurationSnapshot)

    def get_Description(self):  # String
        return self.get_query_params().get('Description')

    def set_Description(self, Description):  # String
        self.add_query_param('Description', Description)

    def get_AggregatorId(self):  # String
        return self.get_query_params().get('AggregatorId')

    def set_AggregatorId(self, AggregatorId):  # String
        self.add_query_param('AggregatorId', AggregatorId)

    def get_DeliveryChannelTargetArn(self):  # String
        return self.get_query_params().get('DeliveryChannelTargetArn')

    def set_DeliveryChannelTargetArn(self, DeliveryChannelTargetArn):  # String
        self.add_query_param('DeliveryChannelTargetArn', DeliveryChannelTargetArn)

    def get_DeliveryChannelCondition(self):  # String
        return self.get_query_params().get('DeliveryChannelCondition')

    def set_DeliveryChannelCondition(self, DeliveryChannelCondition):  # String
        self.add_query_param('DeliveryChannelCondition', DeliveryChannelCondition)

    def get_ConfigurationItemChangeNotification(self):  # Boolean
        return self.get_query_params().get('ConfigurationItemChangeNotification')

    def set_ConfigurationItemChangeNotification(self, ConfigurationItemChangeNotification):  # Boolean
        self.add_query_param('ConfigurationItemChangeNotification', ConfigurationItemChangeNotification)

    def get_DeliveryChannelName(self):  # String
        return self.get_query_params().get('DeliveryChannelName')

    def set_DeliveryChannelName(self, DeliveryChannelName):  # String
        self.add_query_param('DeliveryChannelName', DeliveryChannelName)

    def get_DeliverySnapshotTime(self):  # String
        return self.get_query_params().get('DeliverySnapshotTime')

    def set_DeliverySnapshotTime(self, DeliverySnapshotTime):  # String
        self.add_query_param('DeliverySnapshotTime', DeliverySnapshotTime)

    def get_OversizedDataOSSTargetArn(self):  # String
        return self.get_query_params().get('OversizedDataOSSTargetArn')

    def set_OversizedDataOSSTargetArn(self, OversizedDataOSSTargetArn):  # String
        self.add_query_param('OversizedDataOSSTargetArn', OversizedDataOSSTargetArn)

    def get_DeliveryChannelType(self):  # String
        return self.get_query_params().get('DeliveryChannelType')

    def set_DeliveryChannelType(self, DeliveryChannelType):  # String
        self.add_query_param('DeliveryChannelType', DeliveryChannelType)
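A hedged client-side sketch for the request class above; the credentials, region and parameter values are all placeholders:

from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-shanghai')
request = CreateAggregateConfigDeliveryChannelRequest()
request.set_AggregatorId('<aggregator-id>')
request.set_DeliveryChannelName('example-channel')
request.set_DeliveryChannelType('OSS')
request.set_DeliveryChannelTargetArn('<oss-bucket-arn>')
response = client.do_action_with_exception(request)
print(response)

The same pattern applies to the GenerateWebofficeToken request that follows.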
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkimm.endpoint import endpoint_data
import json


class GenerateWebofficeTokenRequest(RpcRequest):

    def __init__(self):
        RpcRequest.__init__(self, 'imm', '2020-09-30', 'GenerateWebofficeToken', 'imm')
        self.set_method('POST')

        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_Referer(self):  # String
        return self.get_query_params().get('Referer')

    def set_Referer(self, Referer):  # String
        self.add_query_param('Referer', Referer)

    def get_Notification(self):  # Struct
        return self.get_query_params().get('Notification')

    def set_Notification(self, Notification):  # Struct
        self.add_query_param("Notification", json.dumps(Notification))

    def get_Password(self):  # String
        return self.get_query_params().get('Password')

    def set_Password(self, Password):  # String
        self.add_query_param('Password', Password)

    def get_ProjectName(self):  # String
        return self.get_query_params().get('ProjectName')

    def set_ProjectName(self, ProjectName):  # String
        self.add_query_param('ProjectName', ProjectName)

    def get_Watermark(self):  # Struct
        return self.get_query_params().get('Watermark')

    def set_Watermark(self, Watermark):  # Struct
        self.add_query_param("Watermark", json.dumps(Watermark))

    def get_NotifyTopicName(self):  # String
        return self.get_query_params().get('NotifyTopicName')

    def set_NotifyTopicName(self, NotifyTopicName):  # String
        self.add_query_param('NotifyTopicName', NotifyTopicName)

    def get_Filename(self):  # String
        return self.get_query_params().get('Filename')

    def set_Filename(self, Filename):  # String
        self.add_query_param('Filename', Filename)

    def get_SourceURI(self):  # String
        return self.get_query_params().get('SourceURI')

    def set_SourceURI(self, SourceURI):  # String
        self.add_query_param('SourceURI', SourceURI)

    def get_ExternalUploaded(self):  # Boolean
        return self.get_query_params().get('ExternalUploaded')

    def set_ExternalUploaded(self, ExternalUploaded):  # Boolean
        self.add_query_param('ExternalUploaded', ExternalUploaded)

    def get_UserData(self):  # String
        return self.get_query_params().get('UserData')

    def set_UserData(self, UserData):  # String
        self.add_query_param('UserData', UserData)

    def get_PreviewPages(self):  # Long
        return self.get_query_params().get('PreviewPages')

    def set_PreviewPages(self, PreviewPages):  # Long
        self.add_query_param('PreviewPages', PreviewPages)

    def get_Hidecmb(self):  # Boolean
        return self.get_query_params().get('Hidecmb')

    def set_Hidecmb(self, Hidecmb):  # Boolean
        self.add_query_param('Hidecmb', Hidecmb)

    def get_CachePreview(self):  # Boolean
        return self.get_query_params().get('CachePreview')

    def set_CachePreview(self, CachePreview):  # Boolean
        self.add_query_param('CachePreview', CachePreview)

    def get_Permission(self):  # Struct
        return self.get_query_params().get('Permission')

    def set_Permission(self, Permission):  # Struct
        self.add_query_param("Permission", json.dumps(Permission))

    def get_CredentialConfig(self):  # Struct
        return self.get_query_params().get('CredentialConfig')

    def set_CredentialConfig(self, CredentialConfig):  # Struct
        self.add_query_param("CredentialConfig", json.dumps(CredentialConfig))

    def get_User(self):  # Struct
        return self.get_query_params().get('User')

    def set_User(self, User):  # Struct
        self.add_query_param("User", json.dumps(User))
import os
import subprocess

import pysam

from TestUtils import force_str


def build_pileup_with_samtoolsshell(fn):
    os.system("samtools mpileup {} 2> /dev/null | wc -l > /dev/null".format(fn))
    return 2998


def build_pileup_with_samtoolspipe(fn):
    FNULL = open(os.devnull, 'w')
    with subprocess.Popen(["samtools", "mpileup", fn],
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=FNULL) as proc:
        return len(proc.stdout.readlines())


def build_pileup_with_pysam(*args, **kwargs):
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        return len(list(inf.pileup(stepper="samtools")))


def build_depth_with_samtoolsshell(fn):
    os.system(
        "samtools mpileup {} 2> /dev/null | awk '{{a += $4}} END {{print a}}' > /dev/null".format(fn))
    return 107241


def build_depth_with_samtoolspipe(fn):
    FNULL = open(os.devnull, 'w')
    with subprocess.Popen(["samtools", "mpileup", fn],
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=FNULL) as proc:
        data = [x.split() for x in proc.stdout.readlines()]
        return [int(x[3]) for x in data]


def build_depth_with_filter_with_pysam(*args, **kwargs):
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        return [x.get_num_aligned() for x in inf.pileup(stepper="samtools")]


def build_depth_with_pysam(*args, **kwargs):
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        return [x.nsegments for x in inf.pileup(stepper="samtools")]


def build_query_bases_with_samtoolsshell(fn):
    os.system(
        "samtools mpileup {} 2> /dev/null | awk '{{a = a $5}} END {{print a}}' | wc -c > /dev/null".format(fn))
    return 116308


def build_query_bases_with_samtoolspipe(fn, *args, **kwargs):
    FNULL = open(os.devnull, 'w')
    with subprocess.Popen(["samtools", "mpileup", fn] + list(args),
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=FNULL) as proc:
        stdout = proc.stdout.read().decode()
        return [x.split()[4] for x in stdout.splitlines()]


def build_query_bases_with_samtoolspysam(fn, *args):
    return [x.split()[4] for x in pysam.samtools.mpileup(fn, *args).splitlines()]


def build_query_bases_with_pysam_pileups(*args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        total_pileup = [
            [r.alignment.query_sequence[r.query_position_or_next]
             for r in column.pileups if r.query_position_or_next is not None]
            for column in inf.pileup(stepper="samtools")]
    return total_pileup


def build_query_qualities_with_pysam_pileups(*args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        total_pileup = [
            [r.alignment.query_qualities[r.query_position_or_next]
             for r in column.pileups if r.query_position_or_next is not None]
            for column in inf.pileup(stepper="samtools")]
    return total_pileup


def build_query_bases_with_pysam(fn, *args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(fn) as inf:
        total_pileup = [column.get_query_sequences(mark_ends=True,
                                                   add_indels=True,
                                                   mark_matches=True)
                        for column in inf.pileup(*args, **kwargs)]
    return total_pileup


def build_query_names_with_pysam(*args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        total_pileup = [column.get_query_names()
                        for column in inf.pileup(stepper="samtools")]
    return total_pileup


def build_query_qualities_with_samtoolspipe(fn):
    FNULL = open(os.devnull, 'w')
    with subprocess.Popen(["samtools", "mpileup", fn],
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=FNULL) as proc:
        data = [force_str(x).split()[5] for x in proc.stdout.readlines()]
    return data


def build_query_qualities_with_pysam(*args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        total_pileup = [column.get_query_qualities()
                        for column in inf.pileup(stepper="samtools")]
    return total_pileup


def build_mapping_qualities_with_samtoolspipe(fn):
    FNULL = open(os.devnull, 'w')
    with subprocess.Popen(["samtools", "mpileup", "-s", fn],
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=FNULL) as proc:
        data = [force_str(x).split()[6] for x in proc.stdout.readlines()]
    return data


def build_mapping_qualities_with_pysam(*args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        total_pileup = [column.get_mapping_qualities()
                        for column in inf.pileup(stepper="samtools")]
    return total_pileup


def build_query_positions_with_samtoolspipe(fn):
    FNULL = open(os.devnull, 'w')
    with subprocess.Popen(["samtools", "mpileup", "-O", fn],
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE,
                          stderr=FNULL) as proc:
        data = [list(map(int, force_str(x).split()[6].split(",")))
                for x in proc.stdout.readlines()]
    return data


def build_query_positions_with_pysam(*args, **kwargs):
    total_pileup = []
    with pysam.AlignmentFile(*args, **kwargs) as inf:
        total_pileup = [column.get_query_positions()
                        for column in inf.pileup(stepper="samtools")]
    return total_pileup
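As a hedged usage sketch: with samtools on PATH and a coordinate-sorted, indexed BAM (the path below is a placeholder), the pipe-based and pysam-based builders should report the same number of pileup columns.

bam = "ex1.bam"  # placeholder path to a coordinate-sorted, indexed BAM
print(build_pileup_with_samtoolspipe(bam))  # columns counted via the samtools pipe
print(build_pileup_with_pysam(bam))         # columns counted by pysam with stepper="samtools"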
# --------------------------------------------------------------
#  Index position functions
# --------------------------------------------------------------


def indexPosition1D(i, N):
    """Determine whether index ``i`` over a list of length ``N`` is an
    interior point, node 0, or node 1."""
    if 0 < i < N - 1:  # Interior
        return 0, None
    elif i == 0:  # Node 0
        return 1, 0
    elif i == N - 1:  # Node 1
        return 1, 1


def indexPosition2D(i, j, N, M):
    """For an N x M grid of data with index ``i`` going 0 -> N-1 and ``j``
    going 0 -> M-1, determine whether (i, j) lies in the interior, on an
    edge, or on a corner.

    Returns four values:

    type : 0 for interior, 1 for an edge, 2 for a corner
    edge : the edge number if type == 1
    node : the node number if type == 2
    index : the index along the edge of interest (only defined for edges)
    """
    if 0 < i < N - 1 and 0 < j < M - 1:  # Interior
        return 0, None, None, None
    elif 0 < i < N - 1 and j == 0:  # Edge 0
        return 1, 0, None, i
    elif 0 < i < N - 1 and j == M - 1:  # Edge 1
        return 1, 1, None, i
    elif i == 0 and 0 < j < M - 1:  # Edge 2
        return 1, 2, None, j
    elif i == N - 1 and 0 < j < M - 1:  # Edge 3
        return 1, 3, None, j
    elif i == 0 and j == 0:  # Node 0
        return 2, None, 0, None
    elif i == N - 1 and j == 0:  # Node 1
        return 2, None, 1, None
    elif i == 0 and j == M - 1:  # Node 2
        return 2, None, 2, None
    elif i == N - 1 and j == M - 1:  # Node 3
        return 2, None, 3, None


def indexPosition3D(i, j, k, N, M, L):
    """For an N x M x L grid of data with index ``i`` going 0 -> N-1, ``j``
    going 0 -> M-1, and ``k`` going 0 -> L-1, determine whether (i, j, k)
    lies in the interior, on a face, on an edge, or on a corner.

    Returns
    -------
    type : int
        0 for interior, 1 for a face, 2 for an edge, 3 for a corner
    number : int
        the face number if type == 1, the edge number if type == 2,
        the node number if type == 3
    index1 : int
        the index along the 0th direction of the face of interest, OR
        along the edge of interest
    index2 : int
        the index along the 1st direction of the face of interest
    """
    # Check interior -> faces -> edges -> nodes to minimize the number of if checks
    if 0 < i < N - 1 and 0 < j < M - 1 and 0 < k < L - 1:  # Interior
        return 0, None, None, None

    elif 0 < i < N - 1 and 0 < j < M - 1 and k == 0:  # Face 0
        return 1, 0, i, j
    elif 0 < i < N - 1 and 0 < j < M - 1 and k == L - 1:  # Face 1
        return 1, 1, i, j
    elif i == 0 and 0 < j < M - 1 and 0 < k < L - 1:  # Face 2
        return 1, 2, j, k
    elif i == N - 1 and 0 < j < M - 1 and 0 < k < L - 1:  # Face 3
        return 1, 3, j, k
    elif 0 < i < N - 1 and j == 0 and 0 < k < L - 1:  # Face 4
        return 1, 4, i, k
    elif 0 < i < N - 1 and j == M - 1 and 0 < k < L - 1:  # Face 5
        return 1, 5, i, k

    elif 0 < i < N - 1 and j == 0 and k == 0:  # Edge 0
        return 2, 0, i, None
    elif 0 < i < N - 1 and j == M - 1 and k == 0:  # Edge 1
        return 2, 1, i, None
    elif i == 0 and 0 < j < M - 1 and k == 0:  # Edge 2
        return 2, 2, j, None
    elif i == N - 1 and 0 < j < M - 1 and k == 0:  # Edge 3
        return 2, 3, j, None
    elif 0 < i < N - 1 and j == 0 and k == L - 1:  # Edge 4
        return 2, 4, i, None
    elif 0 < i < N - 1 and j == M - 1 and k == L - 1:  # Edge 5
        return 2, 5, i, None
    elif i == 0 and 0 < j < M - 1 and k == L - 1:  # Edge 6
        return 2, 6, j, None
    elif i == N - 1 and 0 < j < M - 1 and k == L - 1:  # Edge 7
        return 2, 7, j, None
    elif i == 0 and j == 0 and 0 < k < L - 1:  # Edge 8
        return 2, 8, k, None
    elif i == N - 1 and j == 0 and 0 < k < L - 1:  # Edge 9
        return 2, 9, k, None
    elif i == 0 and j == M - 1 and 0 < k < L - 1:  # Edge 10
        return 2, 10, k, None
    elif i == N - 1 and j == M - 1 and 0 < k < L - 1:  # Edge 11
        return 2, 11, k, None

    elif i == 0 and j == 0 and k == 0:  # Node 0
        return 3, 0, None, None
    elif i == N - 1 and j == 0 and k == 0:  # Node 1
        return 3, 1, None, None
    elif i == 0 and j == M - 1 and k == 0:  # Node 2
        return 3, 2, None, None
    elif i == N - 1 and j == M - 1 and k == 0:  # Node 3
        return 3, 3, None, None
    elif i == 0 and j == 0 and k == L - 1:  # Node 4
        return 3, 4, None, None
    elif i == N - 1 and j == 0 and k == L - 1:  # Node 5
        return 3, 5, None, None
    elif i == 0 and j == M - 1 and k == L - 1:  # Node 6
        return 3, 6, None, None
    elif i == N - 1 and j == M - 1 and k == L - 1:  # Node 7
        return 3, 7, None, None
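A short usage sketch; the expected tuples follow directly from the definitions above.

print(indexPosition2D(2, 2, 5, 4))        # (0, None, None, None) -> interior
print(indexPosition2D(2, 0, 5, 4))        # (1, 0, None, 2)       -> edge 0, index 2 along it
print(indexPosition2D(0, 0, 5, 4))        # (2, None, 0, None)    -> node 0
print(indexPosition3D(0, 0, 0, 3, 3, 3))  # (3, 0, None, None)    -> node 0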
'''Adapted from https://github.com/lucidrains/local-attention.'''
import math
from typing import Optional

import torch
from torch import nn, einsum
import torch.nn.functional as F
from einops import rearrange, repeat, pack, unpack

from archai.discrete_search.search_spaces.config import ArchConfig

TOKEN_SELF_ATTN_VALUE = -5e4


class SinusoidalEmbeddings(nn.Module):
    def __init__(self, dim):
        super().__init__()
        inv_freq = 1. / (10000 ** (torch.arange(0, dim, 2).float() / dim))
        self.register_buffer('inv_freq', inv_freq)

    def forward(self, x):
        n = x.shape[-2]
        t = torch.arange(n, device=x.device).type_as(self.inv_freq)
        freqs = torch.einsum('i , j -> i j', t, self.inv_freq)
        return torch.cat((freqs, freqs), dim=-1)


def rotate_half(x):
    x = rearrange(x, 'b ... (r d) -> b (...) r d', r=2)
    x1, x2 = x.unbind(dim=-2)
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, freqs):
    q, k = map(lambda t: (t * freqs.cos()) + (rotate_half(t) * freqs.sin()), (q, k))
    return q, k


def max_neg_value(tensor):
    return -torch.finfo(tensor.dtype).max


def pad_to_multiple(tensor, multiple, dim=-1, value=0):
    seqlen = tensor.shape[dim]
    m = seqlen / multiple
    if m.is_integer():
        return False, tensor
    remainder = math.ceil(m) * multiple - seqlen
    pad_offset = (0,) * (-1 - dim) * 2
    return True, F.pad(tensor, (*pad_offset, 0, remainder), value=value)


def look_around(x, backward=1, forward=0, pad_value=-1, dim=2):
    # Gather each window together with its `backward`/`forward` neighbors.
    t = x.shape[1]
    dims = (len(x.shape) - dim) * (0, 0)
    padded_x = F.pad(x, (*dims, backward, forward), value=pad_value)
    tensors = [padded_x[:, ind:(ind + t), ...] for ind in range(forward + backward + 1)]
    return torch.cat(tensors, dim=dim)


class LocalAttention(nn.Module):
    def __init__(
        self,
        window_size,
        causal=False,
        look_backward=1,
        look_forward=None,
        dropout=0.,
        autopad=False,
        exact_windowsize=False,
        pad_value: int = -1,
        rel_pos_emb_dim: Optional[int] = None,
        **kwargs
    ):
        super().__init__()
        look_forward = look_forward or (0 if causal else 1)
        assert not (causal and look_forward > 0)

        self.window_size = window_size
        self.autopad = autopad
        self.exact_windowsize = exact_windowsize

        self.causal = causal
        self.look_backward = look_backward
        self.look_forward = look_forward

        self.pad_value = pad_value
        self.dropout = nn.Dropout(dropout)

        self.rel_pos = None
        if rel_pos_emb_dim is not None:
            # backwards compatible with old `rel_pos_emb_config` deprecated argument
            self.rel_pos = SinusoidalEmbeddings(rel_pos_emb_dim)

    def forward(self, q, k, v, bin_attention_mask: Optional[torch.FloatTensor] = None):
        # https://github.com/arogozhnikov/einops/blob/master/docs/4-pack-and-unpack.ipynb
        (q, packed_shape), (k, _), (v, _) = map(lambda t: pack([t], '* n d'), (q, k, v))

        if self.rel_pos is not None:
            pos_emb = self.rel_pos(q)
            q, k = apply_rotary_pos_emb(q, k, pos_emb)

        # auto padding
        if self.autopad:
            orig_seq_len = q.shape[1]
            (needed_pad, q), (_, k), (_, v) = map(
                lambda t: pad_to_multiple(t, self.window_size, dim=-2), (q, k, v))

        b, n, dim_head, device, dtype = *q.shape, q.device, q.dtype
        scale = dim_head ** -0.5

        assert (n % self.window_size) == 0, \
            f'sequence length {n} must be divisible by window size {self.window_size} for local attention'

        windows = n // self.window_size

        seq = torch.arange(n, device=device)
        b_t = rearrange(seq, '(w n) -> 1 w n', w=windows, n=self.window_size)

        bq, bk, bv = map(lambda t: rearrange(t, 'b (w n) d -> b w n d', w=windows), (q, k, v))

        look_around_kwargs = dict(
            backward=self.look_backward,
            forward=self.look_forward,
            pad_value=self.pad_value
        )

        bk = look_around(bk, **look_around_kwargs)
        bv = look_around(bv, **look_around_kwargs)

        bq_t = b_t
        bq_k = look_around(b_t, **look_around_kwargs)

        bq_t = rearrange(bq_t, '... i -> ... i 1')
        bq_k = rearrange(bq_k, '... j -> ... 1 j')

        sim = einsum('b h i e, b h j e -> b h i j', bq, bk) * scale
        mask_value = max_neg_value(sim)

        if self.causal:
            causal_mask = bq_t < bq_k

            if self.exact_windowsize:
                max_causal_window_size = (self.window_size * self.look_backward)
                causal_mask = causal_mask | (bq_t > (bq_k + max_causal_window_size))

            sim = sim.masked_fill(causal_mask, mask_value)
            del causal_mask

        # mask out padding value
        if self.autopad and needed_pad:
            pad_mask = bq_k == self.pad_value
            sim = sim.masked_fill(pad_mask, mask_value)
            del pad_mask

        if bin_attention_mask is not None:
            mask = bin_attention_mask.bool()

            batch = bin_attention_mask.shape[0]
            assert (b % batch) == 0

            h = b // batch

            if self.autopad:
                _, mask = pad_to_multiple(mask, self.window_size, dim=-1, value=False)

            mask = rearrange(mask, '... (w n) -> (...) w n', w=windows, n=self.window_size)
            mask = look_around(mask, **{**look_around_kwargs, 'pad_value': False})
            mask = rearrange(mask, '... j -> ... 1 j')
            mask = repeat(mask, 'b ... -> (b h) ...', h=h)

            sim = sim.masked_fill(~mask, mask_value)
            del mask

        # attention
        attn = sim.softmax(dim=-1)
        attn = self.dropout(attn)

        # aggregation
        out = einsum('b h i j, b h j e -> b h i e', attn, bv)
        out = rearrange(out, 'b w n d -> b (w n) d')

        if self.autopad:
            out = out[:, :orig_seq_len, :]

        out, *_ = unpack(out, packed_shape, '* n d')
        return out


class LocalMHA(nn.Module):
    def __init__(
        self,
        arch_config: ArchConfig,
        hidden_size: int,
        total_heads: int,
        op_heads: int,
        att_dropout=0.,
        prenorm=False,
        use_rotary: bool = True,
        **kwargs
    ):
        super().__init__()
        assert hidden_size % total_heads == 0, 'hidden size must be divisible by total heads'

        self.hidden_size = hidden_size
        self.total_heads = total_heads
        self.op_heads = op_heads

        head_size = self.hidden_size // self.total_heads
        self.op_size = head_size * self.op_heads

        self.norm = nn.LayerNorm(hidden_size) if prenorm else None
        self.to_qkv = nn.Linear(hidden_size, self.op_size * 3, bias=False)

        self.attn_fn = LocalAttention(
            window_size=arch_config.pick('window_size'),
            causal=True,
            autopad=True,
            exact_windowsize=True,
            dropout=att_dropout,
            rel_pos_emb_dim=(head_size if use_rotary else None),
            **kwargs
        )

    def forward(self, hidden_states, bin_attention_mask: Optional[torch.LongTensor] = None, **kwargs):
        if self.norm is not None:
            hidden_states = self.norm(hidden_states)

        q, k, v = self.to_qkv(hidden_states).chunk(3, dim=-1)
        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=self.op_heads), (q, k, v))

        out = self.attn_fn(q, k, v, bin_attention_mask=bin_attention_mask)
        out = rearrange(out, 'b h n d -> b n (h d)')

        return out, None
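A hedged usage sketch for the standalone LocalAttention module: q/k/v have shape (batch, heads, seq_len, head_dim), and autopad handles a sequence length that is not a multiple of the window size. All sizes here are illustrative.

q = torch.randn(2, 4, 100, 32)
k = torch.randn(2, 4, 100, 32)
v = torch.randn(2, 4, 100, 32)

# seq_len 100 is padded internally to 112 (7 windows of 16), then sliced back.
attn = LocalAttention(window_size=16, causal=True, autopad=True, exact_windowsize=True)
out = attn(q, k, v)
print(out.shape)  # torch.Size([2, 4, 100, 32])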