\"\"\"\n\n\ndef gallery(\n urls: list[str],\n *,\n columns: bool,\n refresh: int,\n query_string: str = \"\",\n) -> str:\n extra = \"&\" + query_string if query_string else \"\"\n if columns:\n if refresh:\n return _columns_debug(urls, refresh, extra)\n return _columns(urls)\n assert refresh\n return _grid_debug(urls, refresh, extra)\n\n\ndef _columns(urls: list[str]) -> str:\n elements = []\n\n for url in urls:\n elements.append(\n f\"\"\"\n \n \n \n \"\"\"\n )\n\n elements.append(RESIZE_SCRIPT)\n\n images = \"\\n\".join(elements).replace(\"\\n\" + \" \" * 12, \"\\n\")\n\n head = \"Memegen.link | examples\\n\" + COLUMNS_STYLE\n body = f'
\\n{images}\\n
'\n\n return HTML.format(head=head, body=body)\n\n\ndef _columns_debug(urls: list[str], refresh: int, extra: str) -> str:\n elements = []\n\n for url in urls:\n elements.append(\n f\"\"\"\n \n \n \n \"\"\"\n )\n\n if refresh:\n elements.append(REFRESH_SCRIPT.replace(\"{interval}\", str(refresh * 1000)))\n\n images = \"\\n\".join(elements).replace(\"\\n\" + \" \" * 12, \"\\n\")\n\n head = \"Memegen.link | debug\\n\" + COLUMNS_STYLE\n body = f'
\\n{images}\\n
'\n\n return HTML.format(head=head, body=body)\n\n\ndef _grid_debug(urls: list[str], refresh: int, extra: str):\n elements = []\n\n for url in urls:\n elements.append(\n f\"\"\"\n \n \n \n \"\"\"\n )\n\n elements.append(REFRESH_SCRIPT.replace(\"{interval}\", str(refresh * 1000)))\n\n images = \"\\n\".join(elements).replace(\"\\n\" + \" \" * 12, \"\\n\")\n\n head = \"Memegen.link | test\\n\"\n body = images\n\n return HTML.format(head=head, body=body)\n\napp/utils/__init__.py METASEP\nfrom . import html, http, images, meta, text, urls\n\napp/tests/test_views_templates.py METASEP\nimport json\n\nimport pytest\n\n\ndef describe_list():\n def describe_GET():\n @pytest.mark.slow\n @pytest.mark.parametrize(\"slash\", [\"\", \"/\"])\n def it_returns_all_templates(expect, client, slash):\n request, response = client.get(\"/templates\" + slash, timeout=10)\n expect(response.status) == 200\n expect(len(response.json)) >= 140\n\n @pytest.mark.slow\n def it_can_filter_templates(expect, client):\n request, response = client.get(\"/templates?filter=awesome\", timeout=10)\n expect(response.status) == 200\n expect(len(response.json)) == 3\n\n\ndef describe_detail():\n def describe_GET():\n @pytest.mark.parametrize(\"slash\", [\"\", \"/\"])\n def it_includes_metadata(expect, client, slash):\n request, response = client.get(\"/templates/iw\" + slash)\n expect(response.status) == 200\n expect(response.json) == {\n \"id\": \"iw\",\n \"name\": \"Insanity Wolf\",\n \"lines\": 2,\n \"overlays\": 1,\n \"styles\": [\"default\"],\n \"blank\": \"http://localhost:5000/images/iw.png\",\n \"example\": {\n \"text\": [\"does testing\", \"in production\"],\n \"url\": \"http://localhost:5000/images/iw/does_testing/in_production.png\",\n },\n \"source\": \"http://knowyourmeme.com/memes/insanity-wolf\",\n \"_self\": \"http://localhost:5000/templates/iw\",\n }\n\n def it_returns_404_when_missing(expect, client):\n request, response = client.get(\"/templates/foobar\")\n expect(response.status) == 404\n\n def describe_POST():\n @pytest.mark.parametrize(\"as_json\", [True, False])\n def it_returns_an_image_url(expect, client, as_json):\n data = {\"text_lines[]\": [\"foo\", \"bar\"], \"extension\": \"jpg\"}\n kwargs: dict = {\"content\": json.dumps(data)} if as_json else {\"data\": data}\n request, response = client.post(\"/templates/iw\", **kwargs)\n\n expect(response.status) == 201\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/iw/foo/bar.jpg\"\n }\n\n @pytest.mark.parametrize(\"as_json\", [True, False])\n def it_supports_custom_backgrounds(expect, client, as_json):\n data = {\n \"background\": \"https://www.gstatic.com/webp/gallery/3.png\",\n \"text_lines[]\": [\"foo\", \"bar\"],\n \"extension\": \"jpg\",\n }\n kwargs: dict = {\"content\": json.dumps(data)} if as_json else {\"data\": data}\n request, response = client.post(\"/templates/custom\", **kwargs)\n expect(response.status) == 201\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/custom/foo/bar.jpg\"\n \"?background=https://www.gstatic.com/webp/gallery/3.png\"\n }\n\n @pytest.mark.parametrize(\"id\", [\"fry\", \"custom\"])\n def it_redirects_if_requested(expect, client, id):\n data = {\"text_lines\": [\"abc\"], \"redirect\": True}\n request, response = client.post(\n f\"/templates/{id}\", data=data, allow_redirects=False\n )\n redirect = f\"http://localhost:5000/images/{id}/abc.png?status=201\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n def it_handles_unknown_template_id(expect, 
client, unknown_template):\n data = {\"text_lines\": [\"one\", \"two\"]}\n request, response = client.post(\n f\"/templates/{unknown_template.id}\", data=data\n )\n expect(response.status) == 404\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/unknown/one/two.png\"\n }\n\napp/tests/test_views_shortcuts.py METASEP\nimport pytest\n\nfrom .. import settings\n\n\ndef describe_image_redirects():\n @pytest.mark.parametrize(\"extension\", [\"png\", \"jpg\"])\n def it_redirects_to_normalized_slug(expect, client, extension):\n request, response = client.get(\n f\"/images/fry/One Two.{extension}\", allow_redirects=False\n )\n expect(response.status) == 301\n expect(response.headers[\"Location\"]) == f\"/images/fry/One_Two.{extension}\"\n\n @pytest.mark.parametrize(\"extension\", [\"png\", \"jpg\"])\n def it_preserves_query_params_when_redirecting(expect, client, extension):\n request, response = client.get(\n f\"/images/custom/One Two.{extension}?alt=http://example.com\",\n allow_redirects=False,\n )\n redirect = f\"/images/custom/One_Two.{extension}?alt=http://example.com\"\n expect(response.status) == 301\n expect(response.headers[\"Location\"]) == redirect\n\n def it_handles_encoded_newlines(expect, client):\n request, response = client.get(\"/images/fry/1 2%0A3.jpg\", allow_redirects=False)\n redirect = \"/images/fry/1_2~n3.jpg\"\n expect(response.status) == 301\n expect(response.headers[\"Location\"]) == redirect\n\n\ndef describe_path_redirects():\n def it_redirects_to_example_image_when_no_extension(expect, client):\n request, response = client.get(\"/images/fry\", allow_redirects=False)\n redirect = \"/images/fry/not_sure_if_trolling/or_just_stupid.png\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n def it_redirects_to_custom_image_when_text_but_no_extension(expect, client):\n request, response = client.get(\"/images/fry/_XD\\\\XD\", allow_redirects=False)\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == \"/images/fry/_XD~bXD.png\"\n\n def it_returns_gallery_view_when_debug(expect, client, monkeypatch):\n monkeypatch.setattr(settings, \"DEBUG\", True)\n request, response = client.get(\"/images/fry/test\")\n expect(response.text).contains(\"/images/fry/test.png\")\n\n def it_rejects_unknown_templates(expect, client, unknown_template):\n request, response = client.get(\n f\"/images/{unknown_template.id}\", allow_redirects=False\n )\n expect(response.status) == 404\n\n def it_creates_new_templates_when_debug(\n expect, client, unknown_template, monkeypatch\n ):\n monkeypatch.setattr(settings, \"DEBUG\", True)\n request, response = client.get(\n f\"/images/{unknown_template.id}\", allow_redirects=False\n )\n expect(response.status) == 501\n expect(response.text).contains(\"Template not fully implemented\")\n\n def it_handles_sample_templates(expect, client, monkeypatch):\n monkeypatch.setattr(settings, \"DEBUG\", True)\n request, response = client.get(\"/images/\", allow_redirects=False)\n expect(response.status) == 501\n expect(response.text).contains(\"Replace '&lt;sample>' in the URL\")\n\n def it_handles_trailing_slashes(expect, client):\n request, response = client.get(\"/images/fry/\", allow_redirects=False)\n redirect = \"/images/fry\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n\ndef describe_legacy_images():\n @pytest.mark.parametrize(\"extension\", [\"png\", \"jpg\"])\n def it_redirects_to_example_image(expect, client, extension):\n request, 
response = client.get(f\"/fry.{extension}\", allow_redirects=False)\n redirect = f\"/images/fry/not_sure_if_trolling/or_just_stupid.{extension}\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n @pytest.mark.parametrize(\"extension\", [\"png\", \"jpg\"])\n def it_redirects_to_custom_image(expect, client, extension):\n request, response = client.get(f\"/fry/test.{extension}\", allow_redirects=False)\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == f\"/images/fry/test.{extension}\"\n\n\ndef describe_legacy_paths():\n @pytest.mark.parametrize(\"suffix\", [\"\", \".png\", \".jpg\"])\n def it_rejects_unknown_templates(expect, client, unknown_template, suffix):\n request, response = client.get(f\"/{unknown_template.id}{suffix}\")\n expect(response.status) == 404\n\n @pytest.mark.parametrize(\"suffix\", [\"\", \".png\", \".jpg\"])\n def it_rejects_unknown_templates_with_text(\n expect, client, unknown_template, suffix\n ):\n request, response = client.get(f\"/{unknown_template.id}/test{suffix}\")\n expect(response.status) == 404\n\n\ndef describe_legacy_params():\n @pytest.mark.slow\n def it_accepts_alt_for_template(expect, client):\n request, response = client.get(\n \"/images/custom/test.png?alt=https://www.gstatic.com/webp/gallery/3.jpg\"\n )\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n @pytest.mark.slow\n def it_accepts_alt_for_style(expect, client):\n request, response = client.get(\"/images/sad-biden/test.png?style=scowl\")\n expect(response.status) == 200\n\n request, response2 = client.get(\"/images/sad-biden/test.png?alt=scowl\")\n expect(response.status) == 200\n\n expect(len(response.content)) == len(response2.content)\n\napp/tests/test_views_memes.py METASEP\nimport json\nfrom unittest.mock import AsyncMock, patch\n\nimport pytest\n\nfrom .. 
import settings\n\n\ndef describe_list():\n def describe_GET():\n @pytest.mark.slow\n def it_returns_example_image_urls(expect, client):\n request, response = client.get(\"/images\", timeout=10)\n expect(response.status) == 200\n expect(response.json).contains(\n {\n \"url\": \"http://localhost:5000/images/iw/does_testing/in_production.png\",\n \"template\": \"http://localhost:5000/templates/iw\",\n }\n )\n\n @pytest.mark.slow\n def it_can_filter_examples(expect, client):\n request, response = client.get(\"/images?filter=awesome\", timeout=10)\n expect(response.status) == 200\n expect(len(response.json)) == 3\n\n def describe_POST():\n @pytest.mark.parametrize(\"as_json\", [True, False])\n def it_returns_an_image_url(expect, client, as_json):\n data = {\"template_id\": \"iw\", \"text_lines[]\": [\"foo\", \"bar\"]}\n kwargs: dict = {\"content\": json.dumps(data)} if as_json else {\"data\": data}\n request, response = client.post(\"/images\", **kwargs)\n expect(response.status) == 201\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/iw/foo/bar.png\"\n }\n\n def it_removes_redundant_styles(expect, client):\n data = {\n \"template_id\": \"iw\",\n \"text_lines[]\": [\"foo\", \"bar\"],\n \"style[]\": [\" \", \"test\", \"default\"],\n \"font\": \"impact\",\n }\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 201\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/iw/foo/bar.png?style=default,test&font=impact\"\n }\n\n def it_returns_gif_when_animated(expect, client):\n data = {\n \"template_id\": \"iw\",\n \"text_lines[]\": [\"foo\", \"bar\"],\n \"style\": \"animated\",\n }\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 201\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/iw/foo/bar.gif\"\n }\n\n def it_redirects_if_requested(expect, client):\n data = {\"template_id\": \"iw\", \"text_lines\": [\"abc\"], \"redirect\": True}\n request, response = client.post(\"/images\", data=data, allow_redirects=False)\n redirect = \"http://localhost:5000/images/iw/abc.png?status=201\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n def it_requires_template_id(expect, client):\n data = {\"text_lines\": [\"foo\", \"bar\"]}\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 400\n expect(response.json) == {\"error\": '\"template_id\" is required'}\n\n def it_handles_unknown_template_id(expect, client, unknown_template):\n data = {\"template_id\": unknown_template.id, \"text_lines\": [\"one\", \"two\"]}\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 404\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/unknown/one/two.png\"\n }\n\n def it_handles_unknown_template_id_redirect(expect, client, unknown_template):\n data = {\n \"template_id\": unknown_template.id,\n \"text_lines\": [\"one\", \"two\"],\n \"redirect\": True,\n }\n request, response = client.post(\"/images\", data=data, allow_redirects=False)\n redirect = \"http://localhost:5000/images/unknown/one/two.png?status=201\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n def it_handles_missing_text_lines(expect, client):\n data = {\"template_id\": \"iw\"}\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 201\n expect(response.json) == {\"url\": \"http://localhost:5000/images/iw.png\"}\n\n def 
it_drops_trailing_blank_lines(expect, client):\n data = {\"template_id\": \"iw\", \"text_lines\": [\"\", \"\", \"\", \"\"]}\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 201\n expect(response.json) == {\"url\": \"http://localhost:5000/images/iw.png\"}\n\n def it_supports_slashes_to_indicate_blank_lines(expect, client):\n data = {\"template_id\": \"iw\", \"text_lines\": [\"/\", \"2\", \"/\", \"\"]}\n request, response = client.post(\"/images\", data=data)\n expect(response.status) == 201\n expect(response.json) == {\"url\": \"http://localhost:5000/images/iw/_/2.png\"}\n\n def it_handles_invalid_json(expect, client):\n request, response = client.post(\"/images\", content=\"???\")\n expect(response.status) == 400\n expect(response.json) == {\"error\": '\"template_id\" is required'}\n\n\ndef describe_detail():\n @pytest.mark.slow\n @pytest.mark.parametrize(\n (\"path\", \"content_type\"),\n [\n (\"/images/fry.gif\", \"image/gif\"),\n (\"/images/fry.jpg\", \"image/jpeg\"),\n (\"/images/fry.png\", \"image/png\"),\n (\"/images/fry/test.gif\", \"image/gif\"),\n (\"/images/fry/test.jpg\", \"image/jpeg\"),\n (\"/images/fry/test.png\", \"image/png\"),\n ],\n )\n def it_returns_an_image(expect, client, path, content_type):\n request, response = client.get(path, timeout=10)\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == content_type\n\n def it_handles_placeholder_templates(expect, client):\n request, response = client.get(\"/images/string/test.png\")\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_handles_unknown_templates(expect, client, unknown_template):\n request, response = client.get(f\"/images/{unknown_template.id}/test.png\")\n expect(response.status) == 404\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_rejects_invalid_extensions(expect, client):\n request, response = client.get(\"/images/fry/test.foobar\")\n expect(response.status) == 422\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_rejects_extremely_small_sizes(expect, client):\n request, response = client.get(\"/images/fry/test.jpg?width=9\")\n expect(response.status) == 422\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\n def it_rejects_invalid_sizes(expect, client):\n request, response = client.get(\"/images/fry/test.jpg?width=abc\")\n expect(response.status) == 422\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\n def it_rejects_extremely_long_urls(expect, client):\n text = \"test-\" * 50\n request, response = client.get(f\"/images/fry/{text}.jpg\")\n expect(response.status) == 414\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\n def describe_font():\n def it_rejects_unknown_fonts(expect, client):\n request, response = client.get(\"/images/fry/test.png?font=foobar\")\n expect(response.status) == 422\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_ignores_placeholder_values(expect, client):\n request, response = client.get(\"/images/fry/test.png?font=string\")\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def describe_watermark():\n @pytest.fixture(autouse=True)\n def watermark_settings(monkeypatch, client):\n monkeypatch.setattr(settings, \"ALLOWED_WATERMARKS\", [\"example.com\"])\n\n @pytest.fixture\n def default_content(watermark_settings, client):\n request, response = client.get(\"/images/fry/test.png\")\n return 
response.content\n\n def it_returns_a_unique_image(expect, client, default_content):\n request, response = client.get(\n \"/images/fry/test.png?watermark=example.com\",\n allow_redirects=False,\n )\n expect(response.status) == 200\n expect(len(response.content)) != len(default_content)\n\n @pytest.mark.parametrize(\"extension\", [\"png\", \"jpg\"])\n def it_rejects_unknown_watermarks(expect, client, extension):\n request, response = client.get(\n f\"/images/fry/test.{extension}?watermark=foobar\",\n allow_redirects=False,\n )\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == f\"/images/fry/test.{extension}\"\n\n @pytest.mark.parametrize(\"extension\", [\"png\", \"jpg\"])\n def it_removes_redundant_watermarks(expect, client, extension):\n request, response = client.get(\n f\"/images/fry/test.{extension}?watermark=memegen.link\",\n allow_redirects=False,\n )\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == f\"/images/fry/test.{extension}\"\n\n @patch(\n \"app.utils.meta.authenticate\",\n AsyncMock(return_value={\"image_access\": True}),\n )\n def it_accepts_custom_values_when_authenticated(expect, client):\n request, response = client.get(\n \"/images/fry/test.png?watermark=mydomain.com\",\n allow_redirects=False,\n )\n expect(response.status) == 200\n\n def it_rejects_invalid_authentication(expect, client):\n request, response = client.get(\n \"/images/fry/test.png?watermark=blank\",\n headers={\"X-API-KEY\": \"foobar\"},\n allow_redirects=False,\n )\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == \"/images/fry/test.png\"\n\n def it_is_disabled_automatically_for_small_images(expect, client):\n small_content = client.get(\"/images/fry/test.png?width=300\")[1].content\n request, response = client.get(\n \"/images/fry/test.png?width=300&watermark=example.com\",\n allow_redirects=False,\n )\n expect(response.status) == 200\n expect(len(response.content)) == len(small_content)\n\n def describe_styles():\n @pytest.fixture(\n params=[\n \"/images/ds/one/two.png?\",\n \"/images/custom/test.png?background=https://www.gstatic.com/webp/gallery/3.jpg&\",\n ]\n )\n def base_url(request):\n return request.param\n\n @pytest.mark.slow\n def it_supports_alternate_styles(expect, client):\n request, response = client.get(\"/images/ds/one/two.png?style=maga\")\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n @pytest.mark.parametrize(\"slug\", [\"ds\", \"ds/one/two\"])\n def it_redirects_to_gif_when_animated(expect, client, slug):\n request, response = client.get(\n f\"/images/{slug}.png?style=animated\", allow_redirects=False\n )\n redirect = f\"/images/{slug}.gif\"\n expect(response.status) == 301\n expect(response.headers[\"Location\"]) == redirect\n\n @pytest.mark.slow\n def it_rejects_invalid_styles(expect, client, base_url):\n request, response = client.get(base_url + \"style=foobar\")\n expect(response.status) == 422\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n @pytest.mark.slow\n def it_ignores_placeholder_values(expect, client, base_url):\n request, response = client.get(base_url + \"style=string\")\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def describe_overlay():\n @pytest.fixture(\n params=[\n \"/images/fine/test.png?\",\n \"/images/custom/test.png?background=https://www.gstatic.com/webp/gallery/3.jpg&\",\n ]\n )\n def base_url(request):\n return request.param\n\n def 
it_supports_custom_styles(expect, client, base_url):\n request, response = client.get(\n base_url + \"style=https://www.gstatic.com/webp/gallery/4.jpg\"\n )\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n @pytest.mark.slow\n def it_requires_image_urls(expect, client, base_url):\n request, response = client.get(base_url + \"style=http://example.com\")\n expect(response.status) == 415\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n @pytest.mark.slow\n def it_handles_missing_urls(expect, client, base_url):\n request, response = client.get(\n base_url + \"style=http://example.com/does_not_exist.png\"\n )\n expect(response.status) == 415\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def describe_custom():\n def it_supports_custom_templates(expect, client):\n request, response = client.get(\n \"/images/custom/test.png\"\n \"?background=https://www.gstatic.com/webp/gallery/3.jpg\"\n )\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_requires_an_image_with_custom_templates(expect, client):\n request, response = client.get(\"/images/custom/test.png\")\n expect(response.status) == 422\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_handles_invalid_urls(expect, client):\n request, response = client.get(\"/images/custom/test.png?background=foobar\")\n expect(response.status) == 415\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_handles_missing_urls(expect, client):\n request, response = client.get(\n \"/images/custom/test.png\"\n \"?background=http://example.com/does_not_exist.png\"\n )\n expect(response.status) == 415\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n def it_ignores_placeholder_values(expect, client):\n request, response = client.get(\n \"/images/custom/string.png?background=string\"\n )\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/png\"\n\n\ndef describe_automatic():\n def describe_POST():\n def it_requires_text(expect, client):\n request, response = client.post(\"/images/automatic\")\n expect(response.status) == 400\n expect(response.json) == {\"error\": '\"text\" is required'}\n\n @patch(\n \"app.utils.meta.search\",\n AsyncMock(\n return_value=[\n {\n \"image_url\": \"http://example.com/images/example.png\"\n + \"?background=https://www.gstatic.com/webp/gallery/3.png\",\n \"confidence\": 0.5,\n }\n ]\n ),\n )\n @pytest.mark.parametrize(\"as_json\", [True, False])\n def it_normalizes_the_url(expect, client, as_json):\n data = {\"text\": \"example\"}\n kwargs: dict = {\"content\": json.dumps(data)} if as_json else {\"data\": data}\n request, response = client.post(\"/images/automatic\", **kwargs)\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/example.png\"\n + \"?background=https://www.gstatic.com/webp/gallery/3.png\",\n \"confidence\": 0.5,\n }\n\n def it_handles_invalid_json(expect, client):\n request, response = client.post(\"/images/automatic\", content=\"???\")\n expect(response.status) == 400\n expect(response.json) == {\"error\": '\"text\" is required'}\n\n\ndef describe_custom():\n def describe_POST():\n @pytest.mark.parametrize(\"as_json\", [True, False])\n def it_supports_custom_backgrounds(expect, client, as_json):\n data = {\n \"background\": \"https://www.gstatic.com/webp/gallery/3.png\",\n \"text_lines[]\": [\"foo\", \"bar\"],\n \"extension\": \"jpg\",\n }\n kwargs: dict = {\"content\": 
json.dumps(data)} if as_json else {\"data\": data}\n request, response = client.post(\"/images/custom\", **kwargs)\n expect(response.status) == 201\n expect(response.json) == {\n \"url\": \"http://localhost:5000/images/custom/foo/bar.jpg\"\n \"?background=https://www.gstatic.com/webp/gallery/3.png\"\n }\n\n def it_redirects_if_requested(expect, client):\n data = {\n \"background\": \"https://www.gstatic.com/webp/gallery/4.png\",\n \"text_lines\": [\"abc\"],\n \"redirect\": True,\n }\n request, response = client.post(\n \"/images/custom\", data=data, allow_redirects=False\n )\n redirect = \"http://localhost:5000/images/custom/abc.png?background=https://www.gstatic.com/webp/gallery/4.png&status=201\"\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == redirect\n\n def describe_GET():\n @patch(\n \"app.utils.meta.search\",\n AsyncMock(\n return_value=[{\"image_url\": \"http://example.com/images/example.png\"}]\n ),\n )\n def it_normalizes_the_url(expect, client):\n request, response = client.get(\"/images/custom\")\n expect(response.json) == [\n {\"url\": \"http://localhost:5000/images/example.png\"}\n ]\n\n @patch(\n \"app.utils.meta.search\",\n AsyncMock(\n return_value=[\n {\n \"image_url\": \"http://example.com/images/example.png\"\n + \"?background=https://www.gstatic.com/webp/gallery/3.png\"\n }\n ]\n ),\n )\n def it_normalizes_the_url_with_background(expect, client):\n request, response = client.get(\"/images/custom\")\n expect(response.json) == [\n {\n \"url\": \"http://localhost:5000/images/example.png\"\n + \"?background=https://www.gstatic.com/webp/gallery/3.png\"\n }\n ]\n\napp/tests/test_views_clients.py METASEP\nimport pytest\n\n\ndef describe_auth():\n def describe_POST():\n def it_returns_401_when_unauthenticated(expect, client):\n request, response = client.post(\"/auth\")\n expect(response.status) == 401\n expect(response.json) == {\"error\": \"API key missing or invalid.\"}\n\n\ndef describe_fonts():\n def describe_GET():\n def it_returns_all_fonts(expect, client):\n request, response = client.get(\"/fonts\")\n expect(len(response.json)) == 6\n\n\ndef describe_image_preview():\n @pytest.fixture\n def path():\n return \"/images/preview.jpg\"\n\n def it_returns_an_image(expect, client, path):\n request, response = client.get(path)\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\n def it_supports_custom_templates(expect, client, path):\n request, response = client.get(\n path + \"?template=https://www.gstatic.com/webp/gallery/1.png\"\n )\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\n def it_handles_invalid_urls(expect, client, path):\n request, response = client.get(path + \"?template=http://example.com/foobar.jpg\")\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\n def it_handles_invalid_keys(expect, client, path, unknown_template):\n request, response = client.get(path + f\"?template={unknown_template.id}\")\n expect(response.status) == 200\n expect(response.headers[\"content-type\"]) == \"image/jpeg\"\n\napp/tests/test_utils_text.py METASEP\nimport pytest\n\nfrom .. 
import utils\n\nLINES_SLUG = [\n ([\"hello world\"], \"hello_world\"),\n ([\"?%#/&\\\\<>\"], \"~q~p~h~s~a~b~l~g\"),\n ([\"a/b\", \"c\"], \"a~sb/c\"),\n ([\"variable_name\"], \"variable__name\"),\n ([\"variable-name\"], \"variable--name\"),\n ([\"foo\\nbar\"], \"foo~nbar\"),\n ([\"def data() -> Dict\"], \"def_data()_--~g_Dict\"),\n ([\"finish <- start\"], \"finish_~l--_start\"),\n (['That\\'s not how \"this\" works'], \"That's_not_how_''this''_works\"),\n ([\"git commit --no-verify\"], \"git_commit_----no--verify\"),\n]\n\n\n@pytest.mark.parametrize((\"lines\", \"slug\"), LINES_SLUG)\ndef test_encode(expect, lines, slug):\n expect(utils.text.encode(lines)) == slug\n\n\n@pytest.mark.parametrize((\"lines\", \"slug\"), LINES_SLUG)\ndef test_decode(expect, lines, slug):\n expect(utils.text.decode(slug)) == lines\n\n\ndef test_decode_dashes(expect):\n expect(utils.text.decode(\"hello-world\")) == [\"hello world\"]\n\n\ndef test_encode_quotes(expect):\n expect(\n utils.text.encode([\"it’ll be great “they” said\"])\n ) == 'it\\'ll_be_great_\"they\"_said'\n\n\ndef test_encode_dashes(expect):\n expect(utils.text.encode([\"1–2 in. of snow\"])) == \"1-2_in._of_snow\"\n\napp/tests/test_utils_meta.py METASEP\nimport pytest\n\nfrom .. import settings, utils\n\n\ndef describe_tokenize():\n @pytest.mark.asyncio\n async def it_restricts_sample_api_key_usage(expect, request):\n request.args = {\"api_key\": \"myapikey42\"}\n request.headers = {}\n\n url, updated = await utils.meta.tokenize(\n request, \"http://api.memegen.link/images/fry/test.png?api_key=myapikey42\"\n )\n\n expect(url) == \"http://api.memegen.link/images/fry/test.png\"\n expect(updated) == True\n\n\ndef describe_track():\n @pytest.mark.asyncio\n async def it_is_disabled_automatically_after_error(expect, monkeypatch, request):\n monkeypatch.setattr(settings, \"REMOTE_TRACKING_URL\", \"http://example.com/404\")\n monkeypatch.setattr(settings, \"REMOTE_TRACKING_ERRORS_LIMIT\", 1)\n request.args = {}\n request.headers = {}\n request.host = \"example.com\"\n request.url = \"http://example.com\"\n\n await utils.meta.track(request, [\"foo\"])\n await utils.meta.track(request, [\"bar\"])\n\n expect(settings.TRACK_REQUESTS) == False\n\napp/tests/test_utils_images.py METASEP\nimport os\nimport shutil\nimport time\nfrom pathlib import Path\n\nimport pytest\n\nfrom .. 
import models, settings, utils\n\n\n@pytest.fixture(scope=\"session\")\ndef images():\n path = settings.TEST_IMAGES_DIRECTORY\n\n flag = path / \".flag\"\n if flag.exists():\n age = time.time() - flag.stat().st_mtime\n if age > 60 * 60 * 6 and \"SKIP_SLOW\" not in os.environ:\n shutil.rmtree(path)\n\n path.mkdir(exist_ok=True)\n flag.touch()\n\n return path\n\n\n@pytest.fixture(scope=\"session\")\ndef template():\n return models.Template.objects.get(\"icanhas\")\n\n\n# Formats\n\n\n@pytest.mark.slow\n@pytest.mark.parametrize((\"id\", \"lines\", \"extension\"), settings.TEST_IMAGES)\ndef test_images(images, id, lines, extension):\n template = models.Template.objects.get(id)\n utils.images.save(template, lines, extension=extension, directory=images)\n\n\n# Size\n\n\ndef test_smaller_width(images, template):\n utils.images.save(template, [\"width=250\"], size=(250, 0), directory=images)\n\n\ndef test_smaller_height(images, template):\n utils.images.save(template, [\"height=250\"], size=(0, 250), directory=images)\n\n\ndef test_larger_width(images, template):\n utils.images.save(template, [\"width=500\"], size=(500, 0), directory=images)\n\n\ndef test_larger_height(images, template):\n utils.images.save(template, [\"height=500\"], size=(0, 500), directory=images)\n\n\ndef test_wide_padding(images, template):\n lines = [\"width=600\", \"height=400\"]\n utils.images.save(template, lines, size=(600, 400), directory=images)\n\n\ndef test_tall_padding(images, template):\n lines = [\"width=400\", \"height=600\"]\n utils.images.save(template, lines, size=(400, 600), directory=images)\n\n\ndef test_small_padding(images, template):\n lines = [\"width=50\", \"height=50\"]\n utils.images.save(template, lines, size=(50, 50), directory=images)\n\n\n@pytest.mark.slow\ndef test_large_padding(images, template):\n lines = [\"width=2000\", \"height=2000\"]\n utils.images.save(template, lines, size=(2000, 2000), directory=images)\n\n\n# Templates\n\n\n@pytest.mark.asyncio\nasync def test_custom_template(images):\n url = \"https://www.gstatic.com/webp/gallery/2.jpg\"\n template = await models.Template.create(url)\n utils.images.save(template, [\"\", \"My Custom Template\"], directory=images)\n\n\n@pytest.mark.slow\n@pytest.mark.asyncio\nasync def test_custom_template_with_exif_rotation(images):\n url = \"https://cdn.discordapp.com/attachments/752902976322142218/752903391281283152/20200608_111430.jpg\"\n template = await models.Template.create(url)\n utils.images.save(template, [\"\", \"This should not be rotated!\"], directory=images)\n\n\ndef test_unknown_template(images):\n template = models.Template.objects.get(\"_error\")\n utils.images.save(template, [\"UNKNOWN TEMPLATE\"], directory=images)\n\n\n# Styles\n\n\n@pytest.mark.slow\ndef test_alternate_style(images):\n template = models.Template.objects.get(\"ds\")\n lines = [\"one\", \"two\", \"three\"]\n utils.images.save(template, lines, style=\"maga\", directory=images)\n\n\n@pytest.mark.slow\n@pytest.mark.asyncio\nasync def test_custom_style(images):\n url = \"https://sn56.scholastic.com/content/dam/classroom-magazines/sn56/issues/2019-20/031620/coronavirus/16-SN56-20200316-VirusOutbreak-PO-2.png\"\n template = models.Template.objects.get(\"fine\")\n await template.check(url, force=True)\n lines = [\"102 °F\", \"this is fine\"]\n utils.images.save(template, lines, style=url, directory=images)\n\n\n@pytest.mark.slow\n@pytest.mark.asyncio\nasync def test_custom_style_rotated(images):\n style = 
\"https://i.imgur.com/6hwAxmO.jpg,https://i.imgur.com/6hwAxmO.jpg\"\n template = models.Template.objects.get(\"same\")\n await template.check(style, force=True)\n utils.images.save(template, [], style=style, directory=images)\n\n\n# Text\n\n\ndef test_special_characters(images, template):\n lines = [\"Special? 100% #these-memes\", \"template_rating: 9/10\"]\n utils.images.save(template, lines, directory=images)\n\n\n@pytest.mark.skipif(\"CIRCLECI\" in os.environ, reason=\"Long filenames not supported\")\ndef test_extremely_long_text(images, tmpdir):\n template = models.Template.objects.get(\"fry\")\n lines = [\"\", \"word \" * 40]\n utils.images.save(template, lines, directory=Path(tmpdir) / \"images\")\n\n\ndef test_long_first_word(images):\n template = models.Template.objects.get(\"fine\")\n lines = [\"\", \"thiiiiiiiiiiiiiiiiiiiiis will probably be fine right now\"]\n utils.images.save(template, lines, directory=images)\n\n\n@pytest.mark.slow\ndef test_text_wrap_when_font_is_too_small(images):\n template = models.Template.objects.get(\"ds\")\n lines = [\"this button seems to be ok to push\"]\n utils.images.save(template, lines, directory=images)\n\n\ndef test_text_wrap_on_small_images(images):\n template = models.Template.objects.get(\"pigeon\")\n lines = [\"\", \"multiple words here\"]\n utils.images.save(template, lines, size=(0, 300), directory=images)\n\n\ndef test_text_wrap_on_smaller_images(images):\n template = models.Template.objects.get(\"toohigh\")\n lines = [\"\", \"the number of sample memes is too damn high!\"]\n utils.images.save(template, lines, size=(0, 200), directory=images)\n\n\n@pytest.mark.slow\ndef test_descender_vertical_alignment(images):\n template = models.Template.objects.get(\"ptj\")\n lines = [\n \"Exit\",\n \"Exit\",\n \"the\",\n \"the\",\n \"monorepo\",\n \"monorepo\",\n \"Exit the monorepo.\",\n \"Stop testing!\",\n ]\n utils.images.save(template, lines, directory=images)\n\n\n# Fonts\n\n\ndef test_font_override(images, template):\n lines = [\"custom\", \"font\"]\n utils.images.save(template, lines, font_name=\"comic\", directory=images)\n\n\n# Watermark\n\n\ndef test_watermark(images, template):\n lines = [\"nominal image\", \"with watermark\"]\n utils.images.save(template, lines, \"Example.com\", directory=images)\n\n\ndef test_watermark_with_padding(images, template):\n lines = [\"paddded image\", \"with watermark\"]\n utils.images.save(template, lines, \"Example.com\", size=(500, 500), directory=images)\n\n\ndef test_watermark_disabled_when_small(images, template):\n lines = [\"small image\", \"with watermark (disabled)\"]\n utils.images.save(template, lines, \"Example.com\", size=(300, 0), directory=images)\n\n\n@pytest.mark.slow\ndef test_watermark_with_many_lines(images):\n template = models.Template.objects.get(\"ptj\")\n lines = [\"\", \"\", \"\", \"\", \"\", \"\", \"Has a watermark.\", \"Doesn't have a watermark!\"]\n utils.images.save(template, lines, \"Example.com\", directory=images)\n\n\n# Debug\n\n\n@pytest.mark.parametrize((\"extension\"), [\"png\", \"gif\"])\ndef test_debug_images(images, monkeypatch, extension):\n monkeypatch.setattr(settings, \"DEBUG\", True)\n\n id, lines, _extension = settings.TEST_IMAGES[0]\n template = models.Template.objects.get(id)\n lines = [lines[0], lines[1] + \" (debug)\"]\n utils.images.save(\n template, lines, directory=images, extension=extension, maximum_frames=5\n )\n\n\ndef test_deployed_images(images, monkeypatch):\n monkeypatch.setattr(settings, \"DEPLOYED\", True)\n\n id, lines, _extension = 
settings.TEST_IMAGES[0]\n template = models.Template.objects.get(id)\n utils.images.save(template, lines, directory=images)\n\n monkeypatch.delattr(utils.images, \"render_image\")\n utils.images.save(template, lines, directory=images)\n\n\ndef test_preview_images(images, template):\n path = images / \"preview.jpg\"\n data, _extension = utils.images.preview(template, [\"nominal image\", \"while typing\"])\n path.write_bytes(data)\n\napp/tests/test_models_text.py METASEP\nimport pytest\n\nfrom ..models import Text\n\n\ndef describe_text():\n def describe_stylize():\n @pytest.mark.parametrize(\n (\"style\", \"before\", \"after\"),\n [\n (\"none\", \"Hello, world!\", \"Hello, world!\"),\n (\"default\", \"these are words.\", \"These are words.\"),\n (\"default\", \"These ARE words.\", \"These ARE words.\"),\n (\"upper\", \"Hello, world!\", \"HELLO, WORLD!\"),\n (\"lower\", \"Hello, world!\", \"hello, world!\"),\n (\"title\", \"these are words\", \"These Are Words\"),\n (\"capitalize\", \"these are words\", \"These are words\"),\n (\"mock\", \"these are words\", \"ThEsE aRe WorDs\"),\n (\"\", \"Hello, world!\", \"Hello, world!\"),\n ],\n )\n def it_applies_style(expect, style, before, after):\n text = Text()\n text.style = style\n expect(text.stylize(before)) == after\n\n def it_defaults_to_upper(expect):\n text = Text()\n text.style = \"\"\n expect(text.stylize(\"Foobar\")) == \"FOOBAR\"\n\n def it_respects_case_when_set_in_any_line(expect):\n text = Text(style=\"default\")\n expect(text.stylize(\"foo\", lines=[\"foo\", \" \", \"bar\"])) == \"Foo\"\n expect(text.stylize(\"foo\", lines=[\"foo\", \" \", \"Bar\"])) == \"foo\"\n\napp/tests/test_models_template.py METASEP\nfrom pathlib import Path\n\nimport datafiles\nimport log\nimport pytest\n\nfrom ..models import Overlay, Template, Text\n\n\ndef describe_template():\n @pytest.fixture\n def template():\n t = Template.objects.get(\"_test\")\n t.clean()\n yield t\n t.clean()\n\n def describe_str():\n def it_includes_the_path(expect, template):\n expect(str(template)).endswith(\"/memegen/templates/_test\")\n\n def describe_valid():\n def it_only_includes_default_style_with_custom_overlay(\n expect, template, monkeypatch\n ):\n monkeypatch.setattr(datafiles.settings, \"HOOKS_ENABLED\", False)\n\n template.overlay = [Overlay()]\n expect(template.styles) == []\n\n del template.styles\n template.overlay[0].center_x = 0.123\n expect(template.styles) == [\"default\"]\n\n def describe_text():\n def it_defaults_to_two_lines(expect, template):\n expect(template.text) == [Text(), Text(anchor_x=0.0, anchor_y=0.8)]\n\n def describe_image():\n def it_has_generic_extension_when_absent(expect, template):\n expect(template.image) == Path.cwd() / \"templates\" / \"_test\" / \"default.img\"\n\n def it_creates_template_directory_automatically(expect):\n template = Template.objects.get_or_create(\"_custom-empty\")\n template.datafile.path.unlink(missing_ok=True)\n template.datafile.path.parent.rmdir()\n log.info(template.image)\n expect(template.datafile.path.parent.exists()) == True\n\n def describe_create():\n @pytest.mark.asyncio\n async def it_downloads_the_image(expect):\n url = \"https://www.gstatic.com/webp/gallery/1.jpg\"\n path = (\n Path.cwd()\n / \"templates\"\n / \"_custom-2d3c91e23b91d6387050e85efc1f3acb39b5a95d\"\n / \"default.jpg\"\n )\n template = await Template.create(url, force=True)\n expect(template.image) == path\n expect(template.image.exists()) == True\n\n @pytest.mark.asyncio\n async def it_handles_missing_urls(expect):\n url = 
\"http://example.com/does_not_exist.png\"\n template = await Template.create(url)\n expect(template.image.exists()) == False\n\n @pytest.mark.asyncio\n async def it_handles_unreachable_urls(expect):\n url = \"http://127.0.0.1/does_not_exist.png\"\n template = await Template.create(url)\n expect(template.image.exists()) == False\n\n @pytest.mark.asyncio\n @pytest.mark.parametrize(\n \"url\",\n [\n \"httpshttps://cdn.pixabay.com/photo/2015/09/09/19/41/cat-932846_1280.jpg\",\n \"https://https://i.imgur.com/bf995.gif&width=400\",\n ],\n )\n async def it_handles_invalid_urls(expect, url):\n template = await Template.create(url)\n expect(template.valid) == False\n\n @pytest.mark.asyncio\n async def it_rejects_non_images(expect):\n url = \"https://file-examples-com.github.io/uploads/2017/04/file_example_MP4_480_1_5MG.mp4\"\n template = await Template.create(url)\n expect(template.image.exists()) == False\n\n @pytest.mark.asyncio\n @pytest.mark.parametrize(\"subdomain\", [\"api\", \"staging\"])\n async def it_handles_builtin_templates(expect, subdomain):\n url = f\"http://{subdomain}.memegen.link/images/fry.png\"\n template = await Template.create(url)\n expect(template.id) == \"fry\"\n\n @pytest.mark.asyncio\n async def it_handles_invalid_builtin_templates(expect):\n url = \"http://api.memegen.link/images/fry2.png\"\n template = await Template.create(url)\n expect(template.id) == \"_error\"\n\n @pytest.mark.asyncio\n async def it_handles_custom_templates(expect):\n url = \"http://api.memegen.link/images/custom.png?background=https://www.gstatic.com/webp/gallery/1.jpg\"\n template = await Template.create(url)\n expect(template.id) == \"_custom-2d3c91e23b91d6387050e85efc1f3acb39b5a95d\"\n\n @pytest.mark.asyncio\n async def it_handles_custom_templates_lacking_background(expect):\n url = \"http://api.memegen.link/images/custom.png?background\"\n template = await Template.create(url)\n expect(template.id) == \"_error\"\n\n @pytest.mark.asyncio\n async def it_handles_custom_templates_with_invalid_background(expect):\n url = \"http://api.memegen.link/images/custom.png?background=https://https://example.com\"\n template = await Template.create(url)\n expect(template.id) == \"_error\"\n\n @pytest.mark.asyncio\n async def it_handles_meme_urls(expect):\n url = \"http://api.memegen.link/images/fry/test.png\"\n template = await Template.create(url)\n expect(template.id) == \"fry\"\n\n def describe_check():\n @pytest.mark.asyncio\n async def it_determines_overlay_file_extension(expect):\n url = \"https://i.guim.co.uk/img/media/8a13052d4db7dcd508af948e5db7b04598e03190/0_294_5616_3370/master/5616.jpg?width=1200&height=1200&quality=85&auto=format&fit=crop&s=bcaa4eed2c1e6dab61c41a61e41433d9\"\n template = Template.objects.get(\"fine\")\n expect(await template.check(url, force=True)) == True\n\n @pytest.mark.asyncio\n async def it_assumes_extension_when_unknown(expect):\n url = \"https://camo.githubusercontent.com/ce9c7a173f38722e129d5ae832a11c928ff72683fae74cbcb9fff41fd9957e63/68747470733a2f2f75706c6f61642e77696b696d656469612e6f72672f77696b6970656469612f636f6d6d6f6e732f7468756d622f332f33662f4769745f69636f6e2e7376672f3130323470782d4769745f69636f6e2e7376672e706e67\"\n template = Template.objects.get(\"fine\")\n expect(await template.check(url, force=True)) == True\n\n @pytest.mark.asyncio\n async def it_accepts_multiple_urls(expect):\n style = \",\".join(\n [\n \"https://user-images.githubusercontent.com/674621/71187801-14e60a80-2280-11ea-94c9-e56576f76baf.png\",\n \"https://i.stack.imgur.com/PvgbL.png\",\n 
\"https://www.nicepng.com/png/detail/13-139107_notepad-icon-icon-anotacoes-png.png\",\n ]\n )\n template = Template.objects.get(\"perfection\")\n expect(await template.check(style)) == True\n\n @pytest.mark.asyncio\n async def it_accepts_default_style_as_placeholder(expect):\n style = \"default,https://i.stack.imgur.com/PvgbL.png\"\n template = Template.objects.get(\"perfection\")\n expect(await template.check(style)) == True\n\napp/tests/test_main.py METASEP\nimport pytest\n\nfrom .. import settings\n\n\ndef describe_index():\n def it_redirects_to_the_docs(expect, client):\n request, response = client.get(\"/\")\n expect(response.status) == 200\n expect(response.text).contains(\"openapi.json\")\n\n def it_contains_favicon(expect, client):\n request, response = client.get(\"/favicon.ico\")\n expect(response.status) == 200\n\n def it_contains_robots(expect, client):\n request, response = client.get(\"/robots.txt\")\n expect(response.status) == 200\n expect(response.text).contains(\"Allow: /\\n\")\n\n\ndef describe_examples():\n @pytest.mark.slow\n def it_displays_images(expect, client):\n request, response = client.get(\"/examples\", timeout=10)\n expect(response.status) == 200\n expect(response.text.count(\"img\")) > 100\n expect(response.text).excludes(\"setInterval\")\n\n @pytest.mark.slow\n def it_can_enable_automatic_refresh(expect, client, monkeypatch):\n monkeypatch.setattr(settings, \"DEBUG\", True)\n request, response = client.get(\"/examples?debug=true\", timeout=10)\n expect(response.status) == 200\n expect(response.text.count(\"img\")) > 100\n expect(response.text).includes(\"setInterval\")\n\n\ndef describe_test():\n def it_redirects_to_the_index(expect, client):\n request, response = client.get(\"/test\", allow_redirects=False)\n expect(response.status) == 302\n expect(response.headers[\"Location\"]) == \"/\"\n\n def it_displays_test_images_when_debug(expect, client, monkeypatch):\n monkeypatch.setattr(settings, \"DEBUG\", True)\n request, response = client.get(\"/test\", allow_redirects=False)\n expect(response.status) == 200\n expect(response.text.count(\"img\")) > 5\n expect(response.text.count(\"img\")) < 100\n\napp/tests/test_docs.py METASEP\nfrom pkg_resources import get_distribution\n\n\ndef describe_spec():\n def it_contains_the_version(expect, client):\n version = get_distribution(\"memegen\").version\n request, response = client.get(\"/docs/openapi.json\")\n expect(response.status) == 200\n expect(response.json[\"info\"][\"version\"]) == version\n\napp/tests/conftest.py METASEP\nimport os\n\nimport pytest\n\nfrom .. import settings\nfrom ..main import app\nfrom ..models import Template\n\n\ndef pytest_configure(config):\n terminal = config.pluginmanager.getplugin(\"terminal\")\n terminal.TerminalReporter.showfspath = False\n\n\ndef pytest_runtest_setup(item):\n for marker in item.iter_markers(name=\"slow\"):\n if \"SKIP_SLOW\" in os.environ:\n pytest.skip(\"slow test\")\n\n\n@pytest.fixture\ndef client(monkeypatch):\n monkeypatch.setattr(settings, \"REMOTE_TRACKING_URL\", None)\n return app.test_client\n\n\n@pytest.fixture\ndef unknown_template():\n template = Template.objects.get_or_create(\"unknown\")\n template.delete()\n yield template\n template.delete()\n\napp/tests/__init__.py METASEP\n\napp/models/text.py METASEP\nfrom dataclasses import dataclass\n\nfrom sanic.log import logger\nfrom spongemock import spongemock\n\nfrom .. 
import settings\nfrom ..types import Dimensions, Point\n\n\n@dataclass\nclass Text:\n\n style: str = \"upper\"\n color: str = \"white\"\n font: str = settings.DEFAULT_FONT\n\n anchor_x: float = 0.0\n anchor_y: float = 0.0\n\n angle: float = 0\n\n scale_x: float = 1.0\n scale_y: float = 0.2\n\n start: float = 0.0\n stop: float = 1.0\n\n @classmethod\n def get_preview(cls) -> \"Text\":\n return cls(\n color=\"#80808060\",\n anchor_x=0.075,\n anchor_y=0.05,\n angle=10,\n scale_x=0.75,\n scale_y=0.75,\n )\n\n @classmethod\n def get_error(cls) -> \"Text\":\n return cls(color=\"yellow\", anchor_x=0.5)\n\n @classmethod\n def get_watermark(cls) -> \"Text\":\n return cls(color=\"#FFFFFF85\")\n\n def get_anchor(self, image_size: Dimensions, watermark: str = \"\") -> Point:\n image_width, image_height = image_size\n anchor = int(image_width * self.anchor_x), int(image_height * self.anchor_y)\n if watermark and self.anchor_x <= 0.1 and self.anchor_y >= 0.8:\n anchor = anchor[0], anchor[1] - settings.WATERMARK_HEIGHT // 2\n return anchor\n\n def get_size(self, image_size: Dimensions) -> Dimensions:\n image_width, image_height = image_size\n size = int(image_width * self.scale_x), int(image_height * self.scale_y)\n return size\n\n def get_stroke(self, width: int) -> tuple[int, str]:\n if self.color == \"black\":\n width = 1\n color = \"#FFFFFF85\"\n elif \"#\" in self.color:\n width = 1\n color = \"#000000\" + self.color[-2:]\n else:\n color = \"black\"\n return width, color\n\n def stylize(self, text: str, **kwargs) -> str:\n lines = [line for line in kwargs.get(\"lines\", [text]) if line.strip()]\n\n if self.style == \"none\":\n return text\n\n if self.style == \"default\":\n text = text.capitalize() if all(line.islower() for line in lines) else text\n return text\n\n if self.style == \"mock\":\n return spongemock.mock(text, diversity_bias=0.75, random_seed=0)\n\n method = getattr(text, self.style or self.__class__.style, None)\n if method:\n return method()\n\n logger.warning(f\"Unsupported text style: {self.style}\")\n return text\n\napp/models/template.py METASEP\nimport asyncio\nimport shutil\nfrom functools import cached_property\nfrom pathlib import Path\n\nimport aiopath\nfrom datafiles import datafile, field\nfrom furl import furl\nfrom sanic import Request\nfrom sanic.log import logger\n\nfrom .. 
import settings, utils\nfrom ..types import Dimensions\nfrom .overlay import Overlay\nfrom .text import Text\n\n\n@datafile(\"../../templates/{self.id}/config.yml\", defaults=True)\nclass Template:\n\n id: str\n name: str = \"\"\n source: str | None = None\n\n text: list[Text] = field(\n default_factory=lambda: [Text(), Text(anchor_x=0.0, anchor_y=0.8)]\n )\n example: list[str] = field(default_factory=lambda: [\"Top Line\", \"Bottom Line\"])\n\n overlay: list[Overlay] = field(default_factory=lambda: [Overlay()])\n\n def __str__(self):\n return str(self.directory)\n\n def __lt__(self, other):\n return self.id < other.id\n\n @cached_property\n def valid(self) -> bool:\n if not settings.DEPLOYED:\n self._update_example()\n self.datafile.save()\n return (\n not self.id.startswith(\"_\")\n and self.image.suffix != settings.PLACEHOLDER_SUFFIX\n )\n\n def _update_example(self):\n for line in self.example:\n if line and not line.isupper():\n return\n self.example = [line.lower() for line in self.example]\n\n @cached_property\n def styles(self):\n styles = []\n for path in self.directory.iterdir():\n if not path.stem[0] in {\".\", \"_\"} and path.stem not in {\n \"config\",\n settings.DEFAULT_STYLE,\n }:\n styles.append(path.stem)\n if styles or self.overlay != [Overlay()]:\n styles.append(\"default\")\n styles.sort()\n return styles\n\n @cached_property\n def directory(self) -> Path:\n return self.datafile.path.parent\n\n @cached_property\n def image(self) -> Path:\n return self.get_image()\n\n def get_image(self, style: str = \"\") -> Path:\n style = style or settings.DEFAULT_STYLE\n\n if utils.urls.schema(style):\n url = style\n style = utils.text.fingerprint(url)\n\n self.directory.mkdir(exist_ok=True)\n for path in self.directory.iterdir():\n if path.stem == style and path.suffix != settings.PLACEHOLDER_SUFFIX:\n return path\n\n if style == settings.DEFAULT_STYLE:\n logger.debug(f\"No default background image for template: {self.id}\")\n return self.directory / (\n settings.DEFAULT_STYLE + settings.PLACEHOLDER_SUFFIX\n )\n\n logger.warning(f\"Style {style!r} not available for template: {self.id}\")\n return self.get_image()\n\n def jsonify(self, request: Request) -> dict:\n return {\n \"id\": self.id,\n \"name\": self.name,\n \"lines\": len(self.text),\n \"overlays\": len(self.overlay) if self.styles else 0,\n \"styles\": self.styles,\n \"blank\": request.app.url_for(\n \"Memes.blank\",\n template_id=self.id + \".\" + settings.DEFAULT_EXTENSION,\n _external=True,\n _scheme=settings.SCHEME,\n ),\n \"example\": {\n \"text\": self.example if any(self.example) else [],\n \"url\": self.build_example_url(request),\n },\n \"source\": self.source,\n \"_self\": self.build_self_url(request),\n }\n\n def build_self_url(self, request: Request) -> str:\n return request.app.url_for(\n \"Templates.detail\",\n id=self.id,\n _external=True,\n _scheme=settings.SCHEME,\n )\n\n def build_example_url(\n self,\n request: Request,\n *,\n extension: str = settings.DEFAULT_EXTENSION,\n external: bool = True,\n ) -> str:\n kwargs = {\n \"template_id\": self.id,\n \"text_paths\": utils.text.encode(self.example) + \".\" + extension,\n \"_external\": external,\n }\n if external:\n kwargs[\"_scheme\"] = settings.SCHEME\n url = request.app.url_for(\"Memes.text\", **kwargs)\n return utils.urls.clean(url)\n\n def build_custom_url(\n self,\n request: Request,\n text_lines: list[str],\n *,\n extension: str = settings.DEFAULT_EXTENSION,\n background: str = \"\",\n style: str = \"\",\n font: str = \"\",\n ):\n if extension not in 
settings.ALLOWED_EXTENSIONS:\n extension = settings.DEFAULT_EXTENSION\n if style == settings.DEFAULT_STYLE:\n style = \"\"\n url = request.app.url_for(\n \"Memes.text\",\n template_id=\"custom\" if self.id == \"_custom\" else self.id,\n text_paths=utils.text.encode(text_lines) + \".\" + extension,\n _external=True,\n _scheme=settings.SCHEME,\n **utils.urls.params(background=background, style=style, font=font),\n )\n return utils.urls.clean(url)\n\n def build_path(\n self,\n text_lines: list[str],\n font_name: str,\n style: str,\n size: Dimensions,\n watermark: str,\n extension: str,\n frames: int = 0,\n ) -> Path:\n slug = utils.text.encode(text_lines)\n variant = str(self.text) + font_name + style + str(size) + watermark\n if frames:\n variant += str(frames)\n fingerprint = utils.text.fingerprint(variant, prefix=\"\")\n filename = f\"{slug}.{fingerprint}.{extension}\"\n return Path(self.id) / filename\n\n @classmethod\n async def create(cls, url: str, *, force=False) -> \"Template\":\n try:\n parsed = furl(url)\n except ValueError as e:\n logger.error(e)\n return cls.objects.get(\"_error\")\n\n if parsed.netloc and \"memegen.link\" in parsed.netloc:\n logger.info(f\"Handling template URL: {url}\")\n if len(parsed.path.segments) > 1:\n id = Path(parsed.path.segments[1]).stem\n if id != \"custom\":\n return cls.objects.get_or_none(id) or cls.objects.get(\"_error\")\n background = parsed.args.get(\"background\")\n if not background:\n return cls.objects.get(\"_error\")\n url = background\n try:\n parsed = furl(url)\n except ValueError as e:\n logger.error(e)\n return cls.objects.get(\"_error\")\n\n id = utils.text.fingerprint(url)\n template = cls.objects.get_or_create(id, url)\n\n suffix = Path(str(parsed.path)).suffix\n if not suffix or len(suffix) > 10:\n logger.warning(f\"Unable to determine image extension: {url}\")\n suffix = settings.PLACEHOLDER_SUFFIX\n\n filename = \"default\" + suffix\n path = aiopath.AsyncPath(template.directory) / filename\n\n if await path.exists() and not settings.DEBUG and not force:\n logger.info(f\"Found background {url} at {path}\")\n return template\n\n logger.info(f\"Saving background {url} to {path}\")\n if not await utils.http.download(url, path):\n return template\n\n try:\n await asyncio.to_thread(utils.images.load, Path(path))\n except utils.images.EXCEPTIONS as e:\n logger.error(e)\n await path.unlink(missing_ok=True)\n\n return template\n\n async def check(self, style: str, *, force=False) -> bool:\n if style in {\"\", None, settings.DEFAULT_STYLE}:\n return True\n\n if style in self.styles:\n return True\n\n if not utils.urls.schema(style):\n logger.error(f\"Invalid style for {self.id} template: {style}\")\n return False\n\n filename = utils.text.fingerprint(style, suffix=self.image.suffix)\n path = aiopath.AsyncPath(self.directory) / filename\n if await path.exists() and not settings.DEBUG and not force:\n logger.info(f\"Found overlay {style} at {path}\")\n return True\n\n urls = style.split(\",\")\n logger.info(f\"Embeding {len(urls)} overlay image(s) onto {path}\")\n await asyncio.to_thread(shutil.copy, self.image, path)\n\n embedded = 0\n for index, url in enumerate(urls):\n success = await self._embed(index, url, path, force)\n if success:\n embedded += 1\n\n if len(urls) == 1 and not embedded:\n await path.unlink()\n\n return embedded == len(urls)\n\n async def _embed(\n self, index: int, url: str, background: aiopath.AsyncPath, force: bool\n ) -> bool:\n if url.strip() in {\"\", settings.DEFAULT_STYLE}:\n return True\n\n suffix = 
Path(str(furl(url).path)).suffix\n if not suffix:\n logger.warning(f\"Unable to determine image extension: {url}\")\n suffix = \".png\"\n\n filename = utils.text.fingerprint(url, prefix=\"_embed-\", suffix=suffix)\n foreground = aiopath.AsyncPath(self.directory) / filename\n\n if await foreground.exists() and not settings.DEBUG and not force:\n logger.info(f\"Found overlay {url} at {foreground}\")\n else:\n logger.info(f\"Saving overlay {url} to {foreground}\")\n await utils.http.download(url, foreground)\n\n try:\n await asyncio.to_thread(\n utils.images.embed, self, index, Path(foreground), Path(background)\n )\n except utils.images.EXCEPTIONS as e:\n logger.error(e)\n await foreground.unlink(missing_ok=True)\n\n return await foreground.exists()\n\n def clean(self):\n for path in self.directory.iterdir():\n if path.stem not in {\"config\", \"default\"}:\n path.unlink()\n\n def delete(self):\n if self.directory.exists():\n shutil.rmtree(self.directory)\n\n def matches(self, query: str) -> bool:\n example = \" \".join(line.lower() for line in self.example)\n return any((query in self.id, query in self.name.lower(), query in example))\n\napp/models/overlay.py METASEP\nfrom dataclasses import dataclass\n\nfrom ..types import Box, Dimensions\n\n\n@dataclass\nclass Overlay:\n\n center_x: float = 0.5\n center_y: float = 0.5\n\n angle: float = 0.0\n\n scale: float = 0.25\n\n def get_size(self, background_size: Dimensions) -> Dimensions:\n background_width, background_height = background_size\n dimension = min(\n int(background_width * self.scale),\n int(background_height * self.scale),\n )\n return dimension, dimension\n\n def get_box(\n self, background_size: Dimensions, foreground_size: Dimensions | None = None\n ) -> Box:\n background_width, background_height = background_size\n if foreground_size is None:\n foreground_size = self.get_size(background_size)\n foreground_width, foreground_height = foreground_size\n box = (\n int(background_width * self.center_x - foreground_width / 2),\n int(background_height * self.center_y - foreground_height / 2),\n int(background_width * self.center_x + foreground_width / 2),\n int(background_height * self.center_y + foreground_height / 2),\n )\n return box\n\napp/models/font.py METASEP\nfrom __future__ import annotations\n\nfrom dataclasses import KW_ONLY, asdict, dataclass\nfrom pathlib import Path\n\nfrom .. 
import settings\n\n\nclass Manager:\n @staticmethod\n def get(name: str) -> Font:\n name = name or settings.DEFAULT_FONT\n for font in FONTS:\n if name in (font.id, font.alias):\n return font\n raise ValueError(f\"Unknown font: {name}\")\n\n @staticmethod\n def all() -> list[Font]:\n return FONTS\n\n\n@dataclass\nclass Font:\n\n filename: str\n id: str\n _: KW_ONLY\n alias: str = \"\"\n\n objects = Manager()\n\n @property\n def path(self) -> Path:\n return settings.ROOT / \"fonts\" / self.filename\n\n @property\n def data(self) -> dict:\n return asdict(self)\n\n\nFONTS = [\n Font(\"TitilliumWeb-Black.ttf\", \"titilliumweb\", alias=\"thick\"),\n Font(\"NotoSans-Bold.ttf\", \"notosans\"),\n Font(\"Kalam-Regular.ttf\", \"kalam\", alias=\"comic\"),\n Font(\"Impact.ttf\", \"impact\"),\n Font(\"TitilliumWeb-SemiBold.ttf\", \"titilliumweb-thin\", alias=\"thin\"),\n Font(\"Segoe UI Bold.ttf\", \"segoe\", alias=\"tiny\"),\n]\n\napp/models/__init__.py METASEP\nfrom .font import Font\nfrom .overlay import Overlay\nfrom .template import Template\nfrom .text import Text\n\napp/types.py METASEP\nBox = tuple[int, int, int, int]\nDimensions = tuple[int, int]\nPoint = tuple[int, int]\nOffset = tuple[int, int]\n\napp/settings.py METASEP\nimport os\nfrom pathlib import Path\n\nROOT = Path(__file__).parent.parent.resolve()\n\n# Server configuration\n\nPORT = int(os.environ.get(\"PORT\", 5000))\nWORKERS = int(os.environ.get(\"WEB_CONCURRENCY\", 1))\nDEBUG = bool(os.environ.get(\"DEBUG\", False))\n\nif \"DOMAIN\" in os.environ: # staging / production\n SERVER_NAME = os.environ[\"DOMAIN\"]\n RELEASE_STAGE = \"staging\" if \"staging\" in SERVER_NAME else \"production\"\n SCHEME = \"https\"\nelif \"HEROKU_APP_NAME\" in os.environ: # review apps\n SERVER_NAME = os.environ[\"HEROKU_APP_NAME\"] + \".herokuapp.com\"\n RELEASE_STAGE = \"review\"\n SCHEME = \"https\"\nelse: # localhost\n SERVER_NAME = f\"localhost:{PORT}\"\n RELEASE_STAGE = \"local\"\n SCHEME = \"http\"\n\nBASE_URL = f\"{SCHEME}://{SERVER_NAME}\"\nDEPLOYED = RELEASE_STAGE != \"local\" and not DEBUG\n\n# API\n\nSUFFIX = \" [DEBUG ONLY]\" if not DEPLOYED else \"\"\nPLACEHOLDER = \"string\" # Swagger UI placeholder value\n\n# Fonts\n\nDEFAULT_FONT = \"thick\"\n\nMINIMUM_FONT_SIZE = 7\n\n# Image rendering\n\nIMAGES_DIRECTORY = ROOT / \"images\"\n\nDEFAULT_STYLE = \"default\"\nDEFAULT_EXTENSION = \"png\"\nALLOWED_EXTENSIONS = [DEFAULT_EXTENSION, \"jpg\", \"jpeg\", \"gif\", \"webp\"]\nPLACEHOLDER_SUFFIX = \".img\"\n\nPREVIEW_SIZE = (300, 300)\nDEFAULT_SIZE = (600, 600)\n\nMAXIMUM_PIXELS = 1920 * 1080\nMAXIMUM_FRAMES = 20\n\n# Watermarks\n\nDISABLED_WATERMARK = \"none\"\nDEFAULT_WATERMARK = \"Memegen.link\"\nALLOWED_WATERMARKS = [DEFAULT_WATERMARK]\n\nWATERMARK_HEIGHT = 15\n\nPREVIEW_TEXT = \"PREVIEW\"\n\n# Test images\n\nTEST_IMAGES_DIRECTORY = ROOT / \"app\" / \"tests\" / \"images\"\nTEST_IMAGES = [\n (\n \"iw\",\n [\"tests code\", \"in production\"],\n \"jpg\",\n ),\n (\n \"fry\",\n [\"a\", \"b\"],\n \"png\",\n ),\n (\n \"fry\",\n [\"short line\", \"longer line of text than the short one\"],\n \"png\",\n ),\n (\n \"fry\",\n [\"longer line of text than the short one\", \"short line\"],\n \"png\",\n ),\n (\n \"sparta\",\n [\"\", \"this is a wide image!\"],\n \"png\",\n ),\n (\n \"ski\",\n [\n \"if you try to put a bunch more text than can possibly fit on a meme\",\n \"you're gonna have a bad time\",\n ],\n \"png\",\n ),\n (\n \"ds\",\n [\"Push this button.\", \"Push that button.\", \"can't decide which is worse\"],\n \"png\",\n ),\n (\n \"spongebob\",\n 
[\"You: Stop talking like that\", \"Me: Stop talking like that\"],\n \"png\",\n ),\n (\n \"mouth\",\n [\"Sales Team presenting solution that won't work\", \"Excited Customer\", \"Me\"],\n \"png\",\n ),\n (\n \"cmm\",\n [\"Many\\nextra\\nlines\\nof\\ntext\"],\n \"png\",\n ),\n (\n \"oprah\",\n [\"you get animated text\", \"and you get animated text\"],\n \"gif\",\n ),\n]\n\n# Analytics\n\nTRACK_REQUESTS = True\nREMOTE_TRACKING_URL = os.getenv(\"REMOTE_TRACKING_URL\")\n\nREMOTE_TRACKING_ERRORS = 0\nREMOTE_TRACKING_ERRORS_LIMIT = int(os.getenv(\"REMOTE_TRACKING_ERRORS_LIMIT\", \"10\"))\n\nBUGSNAG_API_KEY = os.getenv(\"BUGSNAG_API_KEY\")\n\napp/main.py METASEP\nimport asyncio\nimport random\n\nimport log\nfrom sanic import Sanic, response\nfrom sanic_ext import openapi\n\nfrom app import config, helpers, settings, utils\n\napp = Sanic(name=\"memegen\")\nconfig.init(app)\n\n\n@app.get(\"/\")\n@openapi.exclude(True)\ndef index(request):\n return response.redirect(\"/docs\")\n\n\n@app.get(\"/examples\")\n@openapi.exclude(True)\nasync def examples(request):\n animated = utils.urls.flag(request, \"animated\")\n items = await asyncio.to_thread(helpers.get_example_images, request, \"\", animated)\n urls = [items[0] for items in items]\n if settings.DEBUG:\n refresh = int(request.args.get(\"refresh\", 5 * 60))\n else:\n refresh = 0\n random.shuffle(urls)\n content = utils.html.gallery(urls, columns=True, refresh=refresh)\n return response.html(content)\n\n\n@app.get(\"/test\")\n@openapi.exclude(True)\nasync def test(request):\n if not settings.DEBUG:\n return response.redirect(\"/\")\n\n urls = await asyncio.to_thread(helpers.get_test_images, request)\n content = utils.html.gallery(urls, columns=False, refresh=20)\n return response.html(content)\n\n\n@app.get(\"/favicon.ico\")\n@openapi.exclude(True)\nasync def favicon(request):\n return await response.file(\"app/static/favicon.ico\")\n\n\n@app.get(\"/robots.txt\")\n@openapi.exclude(True)\nasync def robots(request):\n return await response.file(\"app/static/robots.txt\")\n\n\nif __name__ == \"__main__\":\n log.reset()\n log.silence(\"datafiles\", allow_warning=True)\n app.run(\n host=\"0.0.0.0\",\n port=settings.PORT,\n workers=settings.WORKERS,\n debug=settings.DEBUG,\n access_log=False,\n motd=False,\n )\n\napp/helpers.py METASEP\nfrom sanic import Request\n\nfrom . 
import settings, utils\nfrom .models import Template\n\n\ndef get_valid_templates(\n request: Request, query: str = \"\", animated: bool | None = None\n) -> list[dict]:\n templates = Template.objects.filter(valid=True, _exclude=\"_custom\")\n if query:\n templates = [t for t in templates if t.matches(query)]\n else:\n templates = sorted(templates)\n if animated is True:\n templates = [t for t in templates if \"animated\" in t.styles]\n elif animated is False:\n templates = [t for t in templates if \"animated\" not in t.styles]\n return [template.jsonify(request) for template in templates]\n\n\ndef get_example_images(\n request: Request, query: str = \"\", animated: bool | None = None\n) -> list[tuple[str, str]]:\n templates = Template.objects.filter(valid=True, _exclude=\"_custom\")\n if query:\n templates = [t for t in templates if t.matches(query)]\n else:\n templates = sorted(templates)\n\n images = []\n for template in templates:\n\n if animated is True and \"animated\" not in template.styles:\n continue\n\n if \"animated\" in template.styles and animated is not False:\n extension = \"gif\"\n else:\n extension = settings.DEFAULT_EXTENSION\n\n example = template.build_example_url(request, extension=extension)\n self = template.build_self_url(request)\n images.append((example, self))\n\n return images\n\n\ndef get_test_images(request: Request) -> list[str]:\n return [\n request.app.url_for(\n \"Memes.text\",\n template_id=id,\n text_paths=utils.text.encode(lines) + \".\" + extension,\n )\n for id, lines, extension in settings.TEST_IMAGES\n ]\n\napp/config.py METASEP\nimport bugsnag\nfrom aiohttp.client_exceptions import ClientPayloadError\nfrom PIL import UnidentifiedImageError\nfrom sanic.exceptions import MethodNotSupported, NotFound\nfrom sanic.handlers import ErrorHandler\n\nfrom . 
import settings, utils, views\n\nIGNORED_EXCEPTIONS = (\n ClientPayloadError,\n MethodNotSupported,\n NotFound,\n UnidentifiedImageError,\n)\n\n\nclass BugsnagErrorHandler(ErrorHandler):\n def default(self, request, exception):\n if self._should_notify(exception):\n bugsnag.notify(exception, metadata={\"request\": request.url})\n return super().default(request, exception)\n\n @staticmethod\n def _should_notify(exception) -> bool:\n if not settings.BUGSNAG_API_KEY:\n return False\n if isinstance(exception, IGNORED_EXCEPTIONS):\n return False\n return True\n\n\ndef init(app):\n app.config.API_HOST = app.config.SERVER_NAME = settings.SERVER_NAME\n app.config.API_SCHEMES = [settings.SCHEME]\n app.config.API_VERSION = utils.meta.version()\n app.config.API_TITLE = \"Memegen.link\"\n app.config.API_CONTACT_EMAIL = \"support@maketested.com\"\n app.config.API_LICENSE_NAME = \"View the license\"\n app.config.API_LICENSE_URL = (\n \"https://github.com/jacebrowning/memegen/blob/main/LICENSE.txt\"\n )\n\n app.config.OAS_UI_DEFAULT = \"swagger\"\n app.config.OAS_UI_REDOC = False\n app.config.SWAGGER_UI_CONFIGURATION = {\n \"operationsSorter\": \"alpha\",\n \"docExpansion\": \"list\",\n }\n\n app.ext.openapi.add_security_scheme(\"ApiKeyAuth\", type=\"apiKey\", name=\"X-API-KEY\")\n app.ext.openapi.secured(\"ApiKeyAuth\")\n\n app.blueprint(views.clients.blueprint)\n app.blueprint(views.memes.blueprint)\n app.blueprint(views.templates.blueprint)\n app.blueprint(views.shortcuts.blueprint) # registered last to avoid collisions\n\n app.error_handler = BugsnagErrorHandler()\n bugsnag.configure(\n api_key=settings.BUGSNAG_API_KEY,\n project_root=\"/app\",\n release_stage=settings.RELEASE_STAGE,\n )\n\napp/__init__.py METASEP\n\napp/views/templates.py METASEP\nimport asyncio\nfrom contextlib import suppress\nfrom dataclasses import dataclass\n\nfrom sanic import Blueprint, exceptions, response\nfrom sanic_ext import openapi\n\nfrom .. 
import helpers, settings, utils\nfrom ..models import Template\n\nblueprint = Blueprint(\"Templates\", url_prefix=\"/templates\")\n\n\n@dataclass\nclass ExampleResponse:\n text: list[str]\n url: str\n\n\n@dataclass\nclass TemplateResponse:\n id: str\n name: str\n lines: int\n overlays: int\n styles: list[str]\n blank: str\n example: ExampleResponse\n source: str\n _self: str\n\n\n@blueprint.get(\"/\")\n@openapi.summary(\"List all templates\")\n@openapi.parameter(\n \"animated\",\n bool,\n \"query\",\n description=\"Limit results to templates supporting animation\",\n)\n@openapi.parameter(\n \"filter\", str, \"query\", description=\"Part of the name or example to match\"\n)\n@openapi.response(\n 200,\n {\"application/json\": list[TemplateResponse]},\n \"Successfully returned a list of all templates\",\n)\nasync def index(request):\n query = request.args.get(\"filter\", \"\").lower()\n animated = utils.urls.flag(request, \"animated\")\n data = await asyncio.to_thread(\n helpers.get_valid_templates, request, query, animated\n )\n return response.json(data)\n\n\n@blueprint.get(\"/\")\n@openapi.summary(\"View a specific template\")\n@openapi.parameter(\"id\", str, \"path\")\n@openapi.response(\n 200,\n {\"application/json\": TemplateResponse},\n \"Successfully returned a specific templates\",\n)\n@openapi.response(404, str, description=\"Template not found\")\nasync def detail(request, id):\n template: Template = Template.objects.get_or_none(id)\n if template:\n return response.json(template.jsonify(request))\n raise exceptions.NotFound(f\"Template not found: {id}\")\n\n\n@dataclass\nclass MemeRequest:\n text_lines: list[str]\n extension: str\n redirect: bool\n\n\n@dataclass\nclass MemeResponse:\n url: str\n\n\n@blueprint.post(\"/\")\n@openapi.tag(\"Memes\")\n@openapi.operation(\"Memes.create_from_template\")\n@openapi.exclude(settings.DEPLOYED)\n@openapi.summary(\"Create a meme from a template\" + settings.SUFFIX)\n@openapi.parameter(\"id\", str, \"path\")\n@openapi.body({\"application/json\": MemeRequest})\n@openapi.response(\n 201,\n {\"application/json\": MemeResponse},\n \"Successfully created a meme from a template\",\n)\nasync def build(request, id):\n return await generate_url(request, id)\n\n\n@dataclass\nclass CustomRequest:\n background: str\n style: str\n text_lines: list[str]\n font: str\n extension: str\n redirect: bool\n\n\n@blueprint.post(\"/custom\")\n@openapi.tag(\"Memes\")\n@openapi.exclude(settings.DEPLOYED)\n@openapi.summary(\"Create a meme from any image\" + settings.SUFFIX)\n@openapi.body({\"application/json\": CustomRequest})\n@openapi.response(\n 201,\n {\"application/json\": MemeResponse},\n \"Successfully created a meme from a custom image\",\n)\nasync def custom(request):\n return await generate_url(request)\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n 
style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: Template = Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\napp/views/shortcuts.py METASEP\nfrom sanic import Blueprint, exceptions, response\nfrom sanic.log import logger\nfrom sanic_ext import openapi\n\nfrom .. import models, settings, utils\n\nblueprint = Blueprint(\"Shortcuts\", url_prefix=\"/\")\n\n\n@blueprint.get(r\"/images/\")\n@openapi.summary(\"Redirect to an example image\")\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(\n 302, {\"image/*\": bytes}, \"Successfully redirected to an example image\"\n)\n@openapi.response(404, {\"text/html\": str}, \"Template not found\")\n@openapi.response(501, {\"text/html\": str}, \"Template not fully implemented\")\nasync def example_path(request, template_id):\n template_id = utils.urls.clean(template_id)\n\n if settings.DEBUG:\n template = models.Template.objects.get_or_create(template_id)\n else:\n template = models.Template.objects.get_or_none(template_id)\n\n if template and template.valid:\n url = template.build_example_url(request, external=False)\n if settings.DEBUG:\n url = url.removesuffix(\".png\")\n return response.redirect(url)\n\n if settings.DEBUG:\n if \"<\" in template_id:\n message = f\"Replace {template_id!r} in the URL\"\n else:\n message = f\"Template not fully implemented: {template}\"\n logger.warning(message)\n template.datafile.save()\n raise exceptions.SanicException(message, 501)\n\n raise exceptions.NotFound(f\"Template not found: {template_id}\")\n\n\n@blueprint.get(r\"/\")\n@openapi.exclude(settings.DEPLOYED)\n@openapi.summary(\"Redirect to an example image\" + settings.SUFFIX)\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(\n 302, {\"image/*\": bytes}, \"Successfully redirected to an example image\"\n)\n@openapi.response(404, {\"text/html\": str}, \"Template not found\")\nasync def legacy_example_image(request, template_id):\n template_id, extension = template_id.rsplit(\".\", 1)\n template = models.Template.objects.get_or_none(template_id)\n if template:\n url = template.build_example_url(request, extension=extension, external=False)\n return response.redirect(url)\n raise exceptions.NotFound(f\"Template not found: {template_id}\")\n\n\n@blueprint.get(r\"/\")\n@openapi.exclude(settings.DEPLOYED)\n@openapi.summary(\"Redirect to an example image\" + 
settings.SUFFIX)\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(\n 302, {\"image/*\": bytes}, \"Successfully redirected to an example image\"\n)\nasync def legacy_example_path(request, template_id):\n template_id = template_id.strip(\"/\")\n return response.redirect(f\"/images/{template_id}\")\n\n\n@blueprint.get(r\"/images//\")\n@openapi.summary(\"Redirect to a custom image\")\n@openapi.parameter(\"text_paths\", str, \"path\")\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(302, {\"image/*\": bytes}, \"Successfully redirected to a custom image\")\nasync def custom_path(request, template_id, text_paths):\n if template_id == \"images\":\n return response.redirect(f\"/images/{text_paths}\".removesuffix(\"/\"))\n\n if not settings.DEBUG:\n url = request.app.url_for(\n \"Memes.text\",\n template_id=template_id,\n text_paths=utils.urls.clean(text_paths) + \".\" + settings.DEFAULT_EXTENSION,\n )\n return response.redirect(url)\n\n template = models.Template.objects.get_or_create(template_id)\n template.datafile.save()\n animated = utils.urls.flag(request, \"animated\")\n extension = \"gif\" if animated else \"png\"\n content = utils.html.gallery(\n [f\"/images/{template_id}/{text_paths}.{extension}\"],\n columns=False,\n refresh=30 if animated else 3,\n query_string=request.query_string,\n )\n return response.html(content)\n\n\n@blueprint.get(r\"//\")\n@openapi.exclude(settings.DEPLOYED)\n@openapi.summary(\"Redirect to a custom image\" + settings.SUFFIX)\n@openapi.parameter(\"text_paths\", str, \"path\")\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(302, {\"image/*\": bytes}, \"Successfully redirected to a custom image\")\n@openapi.response(404, {\"text/html\": str}, description=\"Template not found\")\nasync def legacy_custom_image(request, template_id, text_paths):\n text_paths, extension = text_paths.rsplit(\".\", 1)\n template = models.Template.objects.get_or_none(template_id)\n if template:\n url = request.app.url_for(\n \"Memes.text\",\n template_id=template_id,\n text_paths=text_paths + \".\" + extension,\n )\n return response.redirect(url)\n raise exceptions.NotFound(f\"Template not found: {template_id}\")\n\n\n@blueprint.get(r\"//\")\n@openapi.exclude(settings.DEPLOYED)\n@openapi.summary(\"Redirect to a custom image\" + settings.SUFFIX)\n@openapi.parameter(\"text_paths\", str, \"path\")\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(302, {\"image/*\": bytes}, \"Successfully redirected to a custom image\")\nasync def legacy_custom_path(request, template_id, text_paths):\n if template_id == \"images\":\n return response.redirect(f\"/images/{text_paths}\".removesuffix(\"/\"))\n return response.redirect(f\"/images/{template_id}/{text_paths}\")\n\napp/views/memes.py METASEP\nimport asyncio\nfrom dataclasses import dataclass\n\nfrom sanic import Blueprint, exceptions, response\nfrom sanic.log import logger\nfrom sanic_ext import openapi\n\nfrom .. 
import helpers, models, settings, utils\nfrom .templates import generate_url\n\nblueprint = Blueprint(\"Memes\", url_prefix=\"/images\")\n\n\n@dataclass\nclass ExampleResponse:\n url: str\n template: str\n\n\n@blueprint.get(\"/\")\n@openapi.summary(\"List example memes\")\n@openapi.operation(\"Memes.list\")\n@openapi.parameter(\n \"filter\", str, \"query\", description=\"Part of the template name or example to match\"\n)\n@openapi.response(\n 200,\n {\"application/json\": list[ExampleResponse]},\n \"Successfully returned a list of example memes\",\n)\nasync def index(request):\n query = request.args.get(\"filter\", \"\").lower()\n examples = await asyncio.to_thread(helpers.get_example_images, request, query)\n return response.json(\n [{\"url\": url, \"template\": template} for url, template in examples]\n )\n\n\n@dataclass\nclass MemeRequest:\n template_id: str\n style: list[str]\n text_lines: list[str]\n font: str\n extension: str\n redirect: bool\n\n\n@dataclass\nclass MemeResponse:\n url: str\n\n\n@dataclass\nclass ErrorResponse:\n error: str\n\n\n@blueprint.post(\"/\")\n@openapi.summary(\"Create a meme from a template\")\n@openapi.operation(\"Memes.create\")\n@openapi.body({\"application/json\": MemeRequest})\n@openapi.response(\n 201, {\"application/json\": MemeResponse}, \"Successfully created a meme\"\n)\n@openapi.response(\n 400,\n {\"application/json\": ErrorResponse},\n 'Required \"template_id\" missing in request body',\n)\n@openapi.response(\n 404, {\"application/json\": ErrorResponse}, 'Specified \"template_id\" does not exist'\n)\nasync def create(request):\n return await generate_url(request, template_id_required=True)\n\n\n@dataclass\nclass AutomaticRequest:\n text: str\n safe: bool\n redirect: bool\n\n\n@blueprint.post(\"/automatic\")\n@openapi.exclude(not settings.REMOTE_TRACKING_URL)\n@openapi.summary(\"Create a meme from word or phrase\")\n@openapi.body({\"application/json\": AutomaticRequest})\n@openapi.response(\n 201, {\"application/json\": MemeResponse}, \"Successfully created a meme\"\n)\n@openapi.response(\n 400, {\"application/json\": ErrorResponse}, 'Required \"text\" missing in request body'\n)\nasync def automatic(request):\n if request.form:\n payload = dict(request.form)\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n try:\n query = payload[\"text\"]\n except KeyError:\n return response.json({\"error\": '\"text\" is required'}, status=400)\n\n results = await utils.meta.search(request, query, payload.get(\"safe\", True))\n logger.info(f\"Found {len(results)} result(s)\")\n if not results:\n return response.json({\"message\": f\"No results matched: {query}\"}, status=404)\n\n url = utils.urls.normalize(results[0][\"image_url\"])\n confidence = results[0][\"confidence\"]\n logger.info(f\"Top result: {url} ({confidence})\")\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url, \"confidence\": confidence}, status=201)\n\n\n@dataclass\nclass CustomRequest:\n background: str\n style: str\n text_lines: list[str]\n font: str\n extension: str\n redirect: bool\n\n\n@blueprint.post(\"/custom\")\n@openapi.summary(\"Create a meme from any image\")\n@openapi.body({\"application/json\": CustomRequest})\n@openapi.response(\n 201,\n {\"application/json\": MemeResponse},\n description=\"Successfully created a meme from a custom image\",\n)\nasync def custom(request):\n return 
await generate_url(request)\n\n\n@blueprint.get(\"/custom\")\n@openapi.summary(\"List popular custom memes\")\n@openapi.operation(\"Memes.list_custom\")\n@openapi.parameter(\"safe\", bool, \"query\", description=\"Exclude NSFW results\")\n@openapi.parameter(\n \"filter\", str, \"query\", description=\"Part of the meme's text to match\"\n)\n@openapi.response(\n 200,\n {\"application/json\": list[MemeResponse]},\n \"Successfully returned a list of custom memes\",\n)\nasync def list_custom(request):\n query = request.args.get(\"filter\", \"\").lower()\n safe = utils.urls.flag(request, \"safe\", True)\n\n results = await utils.meta.search(request, query, safe, mode=\"results\")\n logger.info(f\"Found {len(results)} result(s)\")\n if not results:\n return response.json({\"message\": f\"No results matched: {query}\"}, status=404)\n\n items = []\n for result in results:\n url = utils.urls.normalize(result[\"image_url\"])\n url, _updated = await utils.meta.tokenize(request, url)\n items.append({\"url\": url})\n\n return response.json(items, status=200)\n\n\n@blueprint.get(r\"/\")\n@openapi.tag(\"Templates\")\n@openapi.summary(\"Display a template background\")\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(\n 200, {\"image/*\": bytes}, \"Successfully displayed a template background\"\n)\n@openapi.response(404, {\"image/*\": bytes}, \"Template not found\")\n@openapi.response(415, {\"image/*\": bytes}, \"Unable to download image URL\")\n@openapi.response(\n 422,\n {\"image/*\": bytes},\n \"Invalid style for template or no image URL specified for custom template\",\n)\nasync def blank(request, template_id):\n template_id, extension = template_id.rsplit(\".\", 1)\n\n if request.args.get(\"style\") == \"animated\" and extension != \"gif\":\n # TODO: Move this pattern to utils\n params = {k: v for k, v in request.args.items() if k != \"style\"}\n url = request.app.url_for(\n \"Memes.blank\",\n template_id=template_id + \".gif\",\n **params,\n )\n return response.redirect(utils.urls.clean(url), status=301)\n\n return await render_image(request, template_id, extension=extension)\n\n\n@blueprint.get(r\"//\")\n@openapi.summary(\"Display a custom meme\")\n@openapi.parameter(\"text_paths\", str, \"path\")\n@openapi.parameter(\"template_id\", str, \"path\")\n@openapi.response(200, {\"image/*\": bytes}, \"Successfully displayed a custom meme\")\n@openapi.response(404, {\"image/*\": bytes}, \"Template not found\")\n@openapi.response(414, {\"image/*\": bytes}, \"Custom text too long (length >200)\")\n@openapi.response(415, {\"image/*\": bytes}, \"Unable to download image URL\")\n@openapi.response(\n 422,\n {\"image/*\": bytes},\n \"Invalid style for template or no image URL specified for custom template\",\n)\nasync def text(request, template_id, text_paths):\n text_paths, extension = text_paths.rsplit(\".\", 1)\n\n if request.args.get(\"style\") == \"animated\" and extension != \"gif\":\n # TODO: Move this pattern to utils\n params = {k: v for k, v in request.args.items() if k != \"style\"}\n url = request.app.url_for(\n \"Memes.text\",\n template_id=template_id,\n text_paths=text_paths + \".gif\",\n **params,\n )\n return response.redirect(utils.urls.clean(url), status=301)\n\n slug, updated = utils.text.normalize(text_paths)\n if updated:\n url = request.app.url_for(\n \"Memes.text\",\n template_id=template_id,\n text_paths=slug + \".\" + extension,\n **request.args,\n )\n return response.redirect(utils.urls.clean(url), status=301)\n\n url, updated = await utils.meta.tokenize(request, 
request.url)\n if updated:\n return response.redirect(url, status=302)\n\n watermark, updated = await utils.meta.get_watermark(request)\n if updated:\n # TODO: Move this pattern to utils\n params = {k: v for k, v in request.args.items() if k != \"watermark\"}\n url = request.app.url_for(\n \"Memes.text\",\n template_id=template_id,\n text_paths=slug + \".\" + extension,\n **params,\n )\n return response.redirect(utils.urls.clean(url), status=302)\n\n return await render_image(request, template_id, slug, watermark, extension)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 422\n\n else:\n template = models.Template.objects.get_or_none(id)\n if not template or not template.image.exists():\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n if id != settings.PLACEHOLDER:\n status = 404\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\", \"alt\")\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n if extension not in settings.ALLOWED_EXTENSIONS:\n extension = settings.DEFAULT_EXTENSION\n status = 422\n\n font_name = utils.urls.arg(request.args, \"\", \"font\")\n if font_name == settings.PLACEHOLDER:\n font_name = \"\"\n else:\n try:\n models.Font.objects.get(font_name)\n except ValueError:\n font_name = \"\"\n status = 422\n\n try:\n size = int(request.args.get(\"width\", 0)), int(request.args.get(\"height\", 0))\n if 0 < size[0] < 10 or 0 < size[1] < 10:\n raise ValueError(f\"dimensions are too small: {size}\")\n except ValueError as e:\n logger.error(f\"Invalid size: {e}\")\n size = 0, 0\n status = 422\n\n frames = int(request.args.get(\"frames\", 0))\n\n path = await asyncio.to_thread(\n utils.images.save,\n template,\n lines,\n watermark,\n font_name=font_name,\n extension=extension,\n style=style,\n size=size,\n maximum_frames=frames,\n )\n return await response.file(path, status)\n\napp/views/clients.py METASEP\nimport asyncio\nfrom dataclasses import dataclass\nfrom datetime import datetime\n\nfrom sanic import Blueprint, response\nfrom sanic.log import logger\nfrom sanic_ext import openapi\n\nfrom .. 
import models, utils\n\nblueprint = Blueprint(\"Clients\", url_prefix=\"/\")\n\n\n@dataclass\nclass AuthResponse:\n    email: str\n    image_access: bool\n    search_access: bool\n    created: datetime\n    modified: datetime\n\n\n@dataclass\nclass ErrorResponse:\n    error: str\n\n\n@blueprint.post(\"/auth\")\n@openapi.summary(\"Validate your API key\")\n@openapi.response(200, {\"application/json\": AuthResponse}, \"Your API key is valid\")\n@openapi.response(401, {\"application/json\": ErrorResponse}, \"Your API key is invalid\")\nasync def validate(request):\n    info = await utils.meta.authenticate(request)\n    return response.json(\n        info or {\"error\": \"API key missing or invalid.\"},\n        status=200 if info else 401,\n    )\n\n\n@dataclass\nclass FontResponse:\n    filename: str\n    id: str\n    alias: str\n\n\n@blueprint.get(\"/fonts\")\n@openapi.summary(\"List available fonts\")\n@openapi.response(\n    200,\n    {\"application/json\": list[FontResponse]},\n    \"Successfully returned a list of fonts\",\n)\nasync def fonts(request):\n    return response.json([font.data for font in models.Font.objects.all()])\n\n\n@blueprint.get(\"/images/preview.jpg\")\n@openapi.summary(\"Display a preview of a custom meme\")\n@openapi.parameter(\"lines[]\", str, \"query\", description=\"Lines of text to render\")\n@openapi.parameter(\"style\", str, \"query\", description=\"Style name or custom overlay\")\n@openapi.parameter(\n    \"template\", str, \"query\", description=\"Template ID, URL, or custom background\"\n)\n@openapi.response(200, {\"image/jpeg\": bytes}, \"Successfully displayed a custom meme\")\nasync def preview(request):\n    id = request.args.get(\"template\", \"_error\")\n    lines = request.args.getlist(\"lines[]\", [])\n    style = request.args.get(\"style\") or \",\".join(request.args.getlist(\"styles[]\", []))\n    while style.endswith(\",default\"):\n        style = style.removesuffix(\",default\")\n    return await preview_image(request, id, lines, style)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n    error = \"\"\n\n    id = utils.urls.clean(id)\n    if utils.urls.schema(id):\n        template = await models.Template.create(id)\n        if not template.image.exists():\n            logger.error(f\"Unable to download image URL: {id}\")\n            template = models.Template.objects.get(\"_error\")\n            error = \"Invalid Background\"\n    else:\n        template = models.Template.objects.get_or_none(id)\n        if not template:\n            logger.error(f\"No such template: {id}\")\n            template = models.Template.objects.get(\"_error\")\n            error = \"Unknown Template\"\n\n    if not any(line.strip() for line in lines):\n        lines = template.example\n\n    if not utils.urls.schema(style):\n        style = style.strip().lower()\n    if not await template.check(style):\n        error = \"Invalid Overlay\"\n\n    data, content_type = await asyncio.to_thread(\n        utils.images.preview, template, lines, style=style, watermark=error.upper()\n    )\n    return response.raw(data, content_type=content_type)\n\napp/views/__init__.py METASEP\nfrom . import clients, memes, shortcuts, templates\n\napp/views/helpers.py METASEP\n
_updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n 
extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = 
template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = 
utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await 
template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 422\n\n else:\n template = models.Template.objects.get_or_none(id)\n if not template or not template.image.exists():\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n if id != settings.PLACEHOLDER:\n status = 404\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\", \"alt\")\n if not await template.check(style):","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 422\n\n else:\n template = models.Template.objects.get_or_none(id)","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 422\n\n else:\n template = models.Template.objects.get_or_none(id)\n if not template or not template.image.exists():\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n if id != settings.PLACEHOLDER:\n status = 404\n","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: 


async def preview_image(request, id: str, lines: list[str], style: str):
    error = ""

    id = utils.urls.clean(id)
    if utils.urls.schema(id):
        template = await models.Template.create(id)
        if not template.image.exists():
            logger.error(f"Unable to download image URL: {id}")
            template = models.Template.objects.get("_error")
            error = "Invalid Background"
    else:
        template = models.Template.objects.get_or_none(id)
        if not template:
            logger.error(f"No such template: {id}")
            template = models.Template.objects.get("_error")
            error = "Unknown Template"

    if not any(line.strip() for line in lines):
        lines = template.example

    if not utils.urls.schema(style):
        style = style.strip().lower()
    if not await template.check(style):
        error = "Invalid Overlay"

    data, content_type = await asyncio.to_thread(
        utils.images.preview, template, lines, style=style, watermark=error.upper()
    )
    return response.raw(data, content_type=content_type)


async def render_image(
    request,
    id: str,
    slug: str = "",
    watermark: str = "",
    extension: str = settings.DEFAULT_EXTENSION,
):
    lines = utils.text.decode(slug)
    # Track the request in the background without blocking the response.
    asyncio.create_task(utils.meta.track(request, lines))

    status = int(utils.urls.arg(request.args, "200", "status"))

    # Reject unreasonably long slugs with 414 and fall back to the error template.
    if any(len(part.encode()) > 200 for part in slug.split("/")):
        logger.error(f"Slug too long: {slug}")
        slug = slug[:50] + "..."
        lines = utils.text.decode(slug)
        template = models.Template.objects.get("_error")
        style = settings.DEFAULT_STYLE
        status = 414

    elif id == "custom":
        # Custom memes pull their background from a user-supplied image URL.
        url = utils.urls.arg(request.args, None, "background", "alt")
        if url:
            template = await models.Template.create(url)
            if not template.image.exists():
                logger.error(f"Unable to download image URL: {url}")
                template = models.Template.objects.get("_error")
                if url != settings.PLACEHOLDER:
                    status = 415

            style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, "style")
            if not utils.urls.schema(style):
                style = style.lower()
            if not await template.check(style):
                if utils.urls.schema(style):
                    status = 415
                elif style != settings.PLACEHOLDER:
                    status = 422

        else:
            logger.error("No image URL specified for custom template")
            template = models.Template.objects.get("_error")
            style = settings.DEFAULT_STYLE
            status = 422

    else:
        template = models.Template.objects.get_or_none(id)
        if not template or not template.image.exists():
            logger.error(f"No such template: {id}")
            template = models.Template.objects.get("_error")
            if id != settings.PLACEHOLDER:
                status = 404

        style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, "style", "alt")
        if not await template.check(style):
            if utils.urls.schema(style):
                status = 415
            elif style != settings.PLACEHOLDER:
                status = 422

    # Invalid extensions, fonts, or dimensions degrade to 422 but still render.
    if extension not in settings.ALLOWED_EXTENSIONS:
        extension = settings.DEFAULT_EXTENSION
        status = 422

    font_name = utils.urls.arg(request.args, "", "font")
    if font_name == settings.PLACEHOLDER:
        font_name = ""
    else:
        try:
            models.Font.objects.get(font_name)
        except ValueError:
            font_name = ""
            status = 422

    try:
        size = int(request.args.get("width", 0)), int(request.args.get("height", 0))
        if 0 < size[0] < 10 or 0 < size[1] < 10:
            raise ValueError(f"dimensions are too small: {size}")
    except ValueError as e:
        logger.error(f"Invalid size: {e}")
        size = 0, 0
        status = 422

    frames = int(request.args.get("frames", 0))

    # Image rendering is CPU-bound, so run it in a worker thread.
    path = await asyncio.to_thread(
        utils.images.save,
        template,
        lines,
        watermark,
        font_name=font_name,
        extension=extension,
        style=style,
        size=size,
        maximum_frames=frames,
    )
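

# Example sketch: the query parameters render_image() reads from request.args.
# The "/images/<id>/<slug>.<extension>" route shape and the "impact" font name
# are assumptions for illustration; the parameter names are the ones handled
# above (style, font, width, height, frames, status).
def _example_render_url(base: str = "http://localhost:5000") -> str:
    from urllib.parse import urlencode

    params = {
        "style": "default",  # validated via template.check(); bad values yield 415/422
        "font": "impact",    # must resolve via models.Font.objects.get(), else 422
        "width": 600,        # either dimension between 1 and 9 pixels triggers 422
        "height": 400,
        "frames": 10,        # forwarded to utils.images.save() as maximum_frames
        "status": 200,       # explicit status override read at the top of render_image()
    }
    return f"{base}/images/fry/top_text/bottom_text.png?{urlencode(params)}"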
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n","type":"inproject"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n","type":"common"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 422\n\n else:\n template = models.Template.objects.get_or_none(id)\n if not template or not template.image.exists():\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n if id != settings.PLACEHOLDER:\n status = 404\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\", \"alt\")\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n if extension not in settings.ALLOWED_EXTENSIONS:\n extension = settings.DEFAULT_EXTENSION\n status = 422\n\n font_name = utils.urls.arg(request.args, \"\", \"font\")\n if font_name == settings.PLACEHOLDER:\n font_name = \"\"\n else:\n try:\n models.Font.objects.get(font_name)\n except ValueError:\n font_name = \"\"\n status = 422\n\n try:\n size = int(request.args.get(\"width\", 0)), int(request.args.get(\"height\", 0))\n if 0 < size[0] < 10 or 0 < size[1] < 10:\n raise ValueError(f\"dimensions are too small: {size}\")\n except ValueError as e:\n logger.error(f\"Invalid size: {e}\")\n size = 0, 0\n status = 422\n","type":"common"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 422\n\n else:\n template = models.Template.objects.get_or_none(id)\n if not template or not template.image.exists():\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n if id != settings.PLACEHOLDER:\n status = 404\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\", \"alt\")\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n if extension not in settings.ALLOWED_EXTENSIONS:\n extension = settings.DEFAULT_EXTENSION\n status = 422\n\n font_name = utils.urls.arg(request.args, \"\", \"font\")\n if font_name == settings.PLACEHOLDER:\n font_name = \"\"\n else:\n try:\n models.Font.objects.get(font_name)\n except ValueError:\n font_name = \"\"\n status = 422\n\n try:","type":"common"},{"content":"import asyncio\nfrom contextlib import suppress\n","type":"non_informative"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)","type":"non_informative"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
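

# The helpers above are not wired to any routes in this excerpt. A minimal,
# hypothetical wiring sketch follows; the blueprint name, URL paths, and handler
# names are illustrative assumptions, not taken from the source.
from sanic import Blueprint, Request

example_blueprint = Blueprint("memes_example")


@example_blueprint.post("/images")
async def create_meme(request: Request):
    # The payload must contain "template_id"; responds with {"url": ...}.
    return await generate_url(request, template_id_required=True)


@example_blueprint.post("/templates/<template_id:str>")
async def create_meme_from_template(request: Request, template_id: str):
    # The template is taken from the URL path instead of the payload.
    return await generate_url(request, template_id)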
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):","type":"random"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. 
import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = utils.text.decode(slug)\n template = 
models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:","type":"random"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):","type":"random"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log 
import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]\n else:\n try:\n payload = request.json or {}\n except exceptions.InvalidUsage:\n payload = {}\n\n with suppress(KeyError):\n payload[\"style\"] = payload.pop(\"style[]\")\n with suppress(KeyError):\n payload[\"text_lines\"] = payload.pop(\"text_lines[]\")\n\n if template_id_required:\n try:\n template_id = payload[\"template_id\"]\n except KeyError:\n return response.json({\"error\": '\"template_id\" is required'}, status=400)\n else:\n template_id = utils.text.slugify(template_id)\n\n style: str = utils.urls.arg(payload, \"\", \"style\", \"overlay\", \"alt\")\n if isinstance(style, list):\n style = \",\".join([(s.strip() or \"default\") for s in style])\n while style.endswith(\",default\"):\n style = style.removesuffix(\",default\")\n text_lines = utils.urls.arg(payload, [], \"text_lines\")\n font = utils.urls.arg(payload, \"\", \"font\")\n background = utils.urls.arg(payload, \"\", \"background\", \"image_url\")\n extension = utils.urls.arg(payload, \"\", \"extension\")\n\n if style == \"animated\":\n extension = \"gif\"\n style = \"\"\n\n status = 201\n\n if template_id:\n template: models.Template = models.Template.objects.get_or_create(template_id)\n url = template.build_custom_url(\n request,\n text_lines,\n style=style,\n font=font,\n extension=extension,\n )\n if not template.valid:\n status = 404\n template.delete()\n else:\n template = models.Template(\"_custom\")\n url = template.build_custom_url(\n request,\n text_lines,\n background=background,\n style=style,\n font=font,\n extension=extension,\n )\n\n url, _updated = await utils.meta.tokenize(request, url)\n\n if payload.get(\"redirect\", False):\n return response.redirect(utils.urls.add(url, status=\"201\"))\n\n return response.json({\"url\": url}, status=status)\n\n\nasync def preview_image(request, id: str, lines: list[str], style: str):\n error = \"\"\n\n id = utils.urls.clean(id)\n if utils.urls.schema(id):\n template = await models.Template.create(id)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Invalid Background\"\n else:\n template = models.Template.objects.get_or_none(id)\n if not template:\n logger.error(f\"No such template: {id}\")\n template = models.Template.objects.get(\"_error\")\n error = \"Unknown Template\"\n\n if not any(line.strip() for line in lines):\n lines = template.example\n\n if not utils.urls.schema(style):\n style = style.strip().lower()\n if not await template.check(style):\n error = \"Invalid Overlay\"\n\n data, content_type = await asyncio.to_thread(\n utils.images.preview, template, lines, style=style, watermark=error.upper()\n )\n return response.raw(data, content_type=content_type)\n\n\nasync def render_image(\n request,\n id: str,\n slug: str = \"\",\n watermark: str = \"\",\n extension: str = settings.DEFAULT_EXTENSION,\n):\n lines = utils.text.decode(slug)\n asyncio.create_task(utils.meta.track(request, lines))\n\n status = int(utils.urls.arg(request.args, \"200\", \"status\"))\n\n if any(len(part.encode()) > 200 for part in slug.split(\"/\")):\n logger.error(f\"Slug too long: {slug}\")\n slug = slug[:50] + \"...\"\n lines = 
utils.text.decode(slug)\n template = models.Template.objects.get(\"_error\")\n style = settings.DEFAULT_STYLE\n status = 414\n\n elif id == \"custom\":\n url = utils.urls.arg(request.args, None, \"background\", \"alt\")\n if url:\n template = await models.Template.create(url)\n if not template.image.exists():\n logger.error(f\"Unable to download image URL: {url}\")\n template = models.Template.objects.get(\"_error\")\n if url != settings.PLACEHOLDER:\n status = 415\n\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \"style\")\n if not utils.urls.schema(style):\n style = style.lower()\n if not await template.check(style):\n if utils.urls.schema(style):\n status = 415\n elif style != settings.PLACEHOLDER:\n status = 422\n\n else:\n logger.error(\"No image URL specified for custom template\")\n template = models.Template.objects.get(\"_error\")","type":"random"},{"content":"import asyncio\nfrom contextlib import suppress\n\nfrom sanic import exceptions, response\nfrom sanic.log import logger\n\nfrom .. import models, settings, utils\n\n\nasync def generate_url(\n request, template_id: str = \"\", *, template_id_required: bool = False\n):\n if request.form:\n payload = dict(request.form)\n for key in list(payload.keys()):\n if \"lines\" not in key and \"style\" not in key:\n payload[key] = payload.pop(key)[0]","type":"random"}],"string":"[\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n 
extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\",\n \"type\": \"infile\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, 
{\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n    request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n    if request.form:\\n        payload = dict(request.form)\\n        for key in list(payload.keys()):\\n            if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n                payload[key] = payload.pop(key)[0]\\n    else:\\n        try:\\n            payload = request.json or {}\\n        except exceptions.InvalidUsage:\\n            payload = {}\\n\\n    with suppress(KeyError):\\n        payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n    with suppress(KeyError):\\n        payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n    if template_id_required:\\n        try:\\n            template_id = payload[\\\"template_id\\\"]\\n        except KeyError:\\n            return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n    else:\\n        template_id = utils.text.slugify(template_id)\\n\\n    style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n    if isinstance(style, list):\\n        style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n    while style.endswith(\\\",default\\\"):\\n        style = style.removesuffix(\\\",default\\\")\\n    text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n    font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n    background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n    extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n    if style == \\\"animated\\\":\\n        extension = \\\"gif\\\"\\n        style = \\\"\\\"\\n\\n    status = 201\\n\\n    if template_id:\\n        template: models.Template = models.Template.objects.get_or_create(template_id)\\n        url = template.build_custom_url(\\n            request,\\n            text_lines,\\n            style=style,\\n            font=font,\\n            extension=extension,\\n        )\\n        if not template.valid:\\n            status = 
404\\n            template.delete()\\n    else:\\n        template = models.Template(\\\"_custom\\\")\\n        url = template.build_custom_url(\\n            request,\\n            text_lines,\\n            background=background,\\n            style=style,\\n            font=font,\\n            extension=extension,\\n        )\\n\\n    url, _updated = await utils.meta.tokenize(request, url)\\n\\n    if payload.get(\\\"redirect\\\", False):\\n        return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n    return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n    error = \\\"\\\"\\n\\n    id = utils.urls.clean(id)\\n    if utils.urls.schema(id):\\n        template = await models.Template.create(id)\\n        if not template.image.exists():\\n            logger.error(f\\\"Unable to download image URL: {id}\\\")\\n            template = models.Template.objects.get(\\\"_error\\\")\\n            error = \\\"Invalid Background\\\"\\n    else:\\n        template = models.Template.objects.get_or_none(id)\\n        if not template:\\n            logger.error(f\\\"No such template: {id}\\\")\\n            template = models.Template.objects.get(\\\"_error\\\")\\n            error = \\\"Unknown Template\\\"\\n\\n    if not any(line.strip() for line in lines):\\n        lines = template.example\\n\\n    if not utils.urls.schema(style):\\n        style = style.strip().lower()\\n    if not await template.check(style):\\n        error = \\\"Invalid Overlay\\\"\\n\\n    data, content_type = await asyncio.to_thread(\\n        utils.images.preview, template, lines, style=style, watermark=error.upper()\\n    )\\n    return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n    request,\\n    id: str,\\n    slug: str = \\\"\\\",\\n    watermark: str = \\\"\\\",\\n    extension: str = settings.DEFAULT_EXTENSION,\\n):\\n    lines = utils.text.decode(slug)\\n    asyncio.create_task(utils.meta.track(request, lines))\\n\\n    status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n    if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n        logger.error(f\\\"Slug too long: {slug}\\\")\\n        slug = slug[:50] + \\\"...\\\"\\n        lines = utils.text.decode(slug)\\n        template = models.Template.objects.get(\\\"_error\\\")\\n        style = settings.DEFAULT_STYLE\\n        status = 414\\n\\n    elif id == \\\"custom\\\":\\n        url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n        if url:\\n            template = await models.Template.create(url)\\n            if not template.image.exists():\\n                logger.error(f\\\"Unable to download image URL: {url}\\\")\\n                template = models.Template.objects.get(\\\"_error\\\")\\n                if url != settings.PLACEHOLDER:\\n                    status = 415\\n\\n            style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n            if not utils.urls.schema(style):\\n                style = style.lower()\\n            if not await template.check(style):\\n                if utils.urls.schema(style):\\n                    status = 415\\n                elif style != settings.PLACEHOLDER:\\n                    status = 422\\n\\n        else:\\n            logger.error(\\\"No image URL specified for custom template\\\")\\n            template = models.Template.objects.get(\\\"_error\\\")\\n            style = settings.DEFAULT_STYLE\\n            status = 422\\n\\n    else:\\n        template = models.Template.objects.get_or_none(id)\\n        if not template or not template.image.exists():\\n            logger.error(f\\\"No such template: {id}\\\")\\n            template = models.Template.objects.get(\\\"_error\\\")\\n            if id != settings.PLACEHOLDER:\\n                status = 404\\n\\n        style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\", \\\"alt\\\")\\n        if not await template.check(style):\",\n \"type\": \"inproject\"\n },
{\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if 
not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n logger.error(f\\\"Slug too long: {slug}\\\")\\n slug = slug[:50] + \\\"...\\\"\\n lines = utils.text.decode(slug)\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 414\\n\\n elif id == \\\"custom\\\":\\n url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n if url:\\n template = await models.Template.create(url)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {url}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if url != settings.PLACEHOLDER:\\n status = 415\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n if not utils.urls.schema(style):\\n style = style.lower()\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n else:\\n logger.error(\\\"No image URL specified for custom template\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 422\\n\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template or not template.image.exists():\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if id != settings.PLACEHOLDER:\\n status = 404\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n 
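
# --- Usage sketch (illustrative only) -----------------------------------------------
# generate_url is a view helper; the route that exposes it is defined elsewhere and is
# not part of this fragment. The sketch below assumes a POST endpoint at "/images" on a
# local dev server -- both the path and the base URL are assumptions, not something this
# module guarantees. It shows the payload shape that the normalization above accepts
# (the bracketed "text_lines[]"/"style[]" keys are unwrapped before use).
def _example_generate_url_request():  # pragma: no cover - illustrative only
    import json
    from urllib.request import Request, urlopen

    payload = {
        "template_id": "example",            # hypothetical template slug
        "text_lines[]": ["top", "bottom"],   # unwrapped to "text_lines" above
        "style[]": ["default"],              # unwrapped to "style" above
    }
    req = Request(
        "http://localhost:5000/images",      # assumption: local server + route path
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urlopen(req) as resp:
        return json.loads(resp.read())["url"]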


async def preview_image(request, id: str, lines: list[str], style: str):
    error = ""

    id = utils.urls.clean(id)
    if utils.urls.schema(id):
        template = await models.Template.create(id)
        if not template.image.exists():
            logger.error(f"Unable to download image URL: {id}")
            template = models.Template.objects.get("_error")
            error = "Invalid Background"
    else:
        template = models.Template.objects.get_or_none(id)
        if not template:
            logger.error(f"No such template: {id}")
            template = models.Template.objects.get("_error")
            error = "Unknown Template"

    if not any(line.strip() for line in lines):
        lines = template.example

    if not utils.urls.schema(style):
        style = style.strip().lower()
    if not await template.check(style):
        error = "Invalid Overlay"

    data, content_type = await asyncio.to_thread(
        utils.images.preview, template, lines, style=style, watermark=error.upper()
    )
    return response.raw(data, content_type=content_type)
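
# --- Pattern note (illustrative only) -------------------------------------------------
# preview_image keeps the event loop responsive by pushing the blocking image-rendering
# call (utils.images.preview) into a worker thread via asyncio.to_thread. A minimal,
# standalone sketch of that pattern; the function names below are made up for
# illustration and are not part of this project:
async def _example_offload_blocking_render():
    import time

    def _blocking_render(text: str) -> bytes:
        time.sleep(0.1)  # stand-in for CPU/IO-bound image work
        return text.encode()

    # Runs _blocking_render in a thread so other requests keep being served.
    return await asyncio.to_thread(_blocking_render, "preview")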
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n 
asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n logger.error(f\\\"Slug too long: {slug}\\\")\\n slug = slug[:50] + \\\"...\\\"\\n lines = utils.text.decode(slug)\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 414\\n\\n elif id == \\\"custom\\\":\\n url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n if url:\\n template = await models.Template.create(url)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {url}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if url != settings.PLACEHOLDER:\\n status = 415\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n if not utils.urls.schema(style):\\n style = style.lower()\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n else:\\n logger.error(\\\"No image URL specified for custom template\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 422\\n\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template or not template.image.exists():\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if id != settings.PLACEHOLDER:\\n status = 404\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\", \\\"alt\\\")\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n if extension not in settings.ALLOWED_EXTENSIONS:\\n extension = settings.DEFAULT_EXTENSION\\n status = 422\\n\\n font_name = utils.urls.arg(request.args, \\\"\\\", \\\"font\\\")\\n if font_name == settings.PLACEHOLDER:\\n font_name = \\\"\\\"\\n else:\\n try:\\n models.Font.objects.get(font_name)\\n except ValueError:\\n font_name = \\\"\\\"\\n status = 422\\n\\n try:\\n size = int(request.args.get(\\\"width\\\", 0)), int(request.args.get(\\\"height\\\", 0))\\n if 0 < size[0] < 10 or 0 < size[1] < 10:\\n raise ValueError(f\\\"dimensions are too small: {size}\\\")\\n except ValueError as e:\\n logger.error(f\\\"Invalid size: {e}\\\")\\n size = 0, 0\\n status = 422\\n\\n frames = int(request.args.get(\\\"frames\\\", 0))\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n 
asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n logger.error(f\\\"Slug too long: {slug}\\\")\\n slug = slug[:50] + \\\"...\\\"\\n lines = utils.text.decode(slug)\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 414\\n\\n elif id == \\\"custom\\\":\\n url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n if url:\\n template = await models.Template.create(url)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {url}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if url != settings.PLACEHOLDER:\\n status = 415\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n if not utils.urls.schema(style):\\n style = style.lower()\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n else:\\n logger.error(\\\"No image URL specified for custom template\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 422\\n\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template or not template.image.exists():\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if id != settings.PLACEHOLDER:\\n status = 404\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\", \\\"alt\\\")\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n if extension not in settings.ALLOWED_EXTENSIONS:\\n extension = settings.DEFAULT_EXTENSION\\n status = 422\\n\\n font_name = utils.urls.arg(request.args, \\\"\\\", \\\"font\\\")\\n if font_name == settings.PLACEHOLDER:\\n font_name = \\\"\\\"\\n else:\\n try:\\n models.Font.objects.get(font_name)\\n except ValueError:\\n font_name = \\\"\\\"\\n status = 422\\n\\n try:\\n size = int(request.args.get(\\\"width\\\", 0)), int(request.args.get(\\\"height\\\", 0))\\n if 0 < size[0] < 10 or 0 < size[1] < 10:\\n raise ValueError(f\\\"dimensions are too small: {size}\\\")\\n except ValueError as e:\\n logger.error(f\\\"Invalid size: {e}\\\")\\n size = 0, 0\\n status = 422\\n\\n frames = int(request.args.get(\\\"frames\\\", 0))\\n\\n path = await asyncio.to_thread(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n 
asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n logger.error(f\\\"Slug too long: {slug}\\\")\\n slug = slug[:50] + \\\"...\\\"\\n lines = utils.text.decode(slug)\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 414\\n\\n elif id == \\\"custom\\\":\\n url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n if url:\\n template = await models.Template.create(url)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {url}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if url != settings.PLACEHOLDER:\\n status = 415\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n if not utils.urls.schema(style):\\n style = style.lower()\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n else:\\n logger.error(\\\"No image URL specified for custom template\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 422\\n\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template or not template.image.exists():\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if id != settings.PLACEHOLDER:\\n status = 404\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\", \\\"alt\\\")\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n if extension not in settings.ALLOWED_EXTENSIONS:\\n extension = settings.DEFAULT_EXTENSION\\n status = 422\\n\\n font_name = utils.urls.arg(request.args, \\\"\\\", \\\"font\\\")\\n if font_name == settings.PLACEHOLDER:\\n font_name = \\\"\\\"\\n else:\\n try:\\n models.Font.objects.get(font_name)\\n except ValueError:\\n font_name = \\\"\\\"\\n status = 422\\n\\n try:\\n size = int(request.args.get(\\\"width\\\", 0)), int(request.args.get(\\\"height\\\", 0))\\n if 0 < size[0] < 10 or 0 < size[1] < 10:\\n raise ValueError(f\\\"dimensions are too small: {size}\\\")\\n except ValueError as e:\\n logger.error(f\\\"Invalid size: {e}\\\")\\n size = 0, 0\\n status = 422\\n\\n frames = int(request.args.get(\\\"frames\\\", 0))\\n\\n path = await asyncio.to_thread(\\n utils.images.save,\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils


async def generate_url(
    request, template_id: str = "", *, template_id_required: bool = False
):
    if request.form:
        payload = dict(request.form)
        for key in list(payload.keys()):
            if "lines" not in key and "style" not in key:
                payload[key] = payload.pop(key)[0]
    else:
        try:
            payload = request.json or {}
        except exceptions.InvalidUsage:
            payload = {}

    with suppress(KeyError):
        payload["style"] = payload.pop("style[]")
    with suppress(KeyError):
        payload["text_lines"] = payload.pop("text_lines[]")

    if template_id_required:
        try:
            template_id = payload["template_id"]
        except KeyError:
            return response.json({"error": '"template_id" is required'}, status=400)
    else:
        template_id = utils.text.slugify(template_id)

    style: str = utils.urls.arg(payload, "", "style", "overlay", "alt")
    if isinstance(style, list):
        style = ",".join([(s.strip() or "default") for s in style])
    while style.endswith(",default"):
        style = style.removesuffix(",default")
    text_lines = utils.urls.arg(payload, [], "text_lines")
    font = utils.urls.arg(payload, "", "font")
    background = utils.urls.arg(payload, "", "background", "image_url")
    extension = utils.urls.arg(payload, "", "extension")

    if style == "animated":
        extension = "gif"
        style = ""

    status = 201

    if template_id:
        template: models.Template = models.Template.objects.get_or_create(template_id)
        url = template.build_custom_url(
            request,
            text_lines,
            style=style,
            font=font,
            extension=extension,
        )
        if not template.valid:
            status = 404
            template.delete()
    else:
        template = models.Template("_custom")
        url = template.build_custom_url(
            request,
            text_lines,
            background=background,
            style=style,
            font=font,
            extension=extension,
        )

    url, _updated = await utils.meta.tokenize(request, url)

    if payload.get("redirect", False):
        return response.redirect(utils.urls.add(url, status="201"))

    return response.json({"url": url}, status=status)


async def preview_image(request, id: str, lines: list[str], style: str):
    error = ""

    id = utils.urls.clean(id)
    if utils.urls.schema(id):
        template = await models.Template.create(id)
        if not template.image.exists():
            logger.error(f"Unable to download image URL: {id}")
            template = models.Template.objects.get("_error")
            error = "Invalid Background"
    else:
        template = models.Template.objects.get_or_none(id)
        if not template:
            logger.error(f"No such template: {id}")
            template = models.Template.objects.get("_error")
            error = "Unknown Template"

    if not any(line.strip() for line in lines):
        lines = template.example

    if not utils.urls.schema(style):
        style = style.strip().lower()
    if not await template.check(style):
        error = "Invalid Overlay"

    data, content_type = await asyncio.to_thread(
        utils.images.preview, template, lines, style=style, watermark=error.upper()
    )
    return response.raw(data, content_type=content_type)


async def render_image(
    request,
    id: str,
    slug: str = "",
    watermark: str = "",
    extension: str = settings.DEFAULT_EXTENSION,
):
    lines = utils.text.decode(slug)
    asyncio.create_task(utils.meta.track(request, lines))

    status = int(utils.urls.arg(request.args, "200", "status"))

    if any(len(part.encode()) > 200 for part in slug.split("/")):
        logger.error(f"Slug too long: {slug}")
        slug = slug[:50] + "..."
        lines = utils.text.decode(slug)
        template = models.Template.objects.get("_error")
        style = settings.DEFAULT_STYLE
        status = 414

    elif id == "custom":
        url = utils.urls.arg(request.args, None, "background", "alt")
        if url:
            template = await models.Template.create(url)
            if not template.image.exists():
                logger.error(f"Unable to download image URL: {url}")
                template = models.Template.objects.get("_error")
                if url != settings.PLACEHOLDER:
                    status = 415

            style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, "style")
            if not utils.urls.schema(style):
                style = style.lower()
            if not await template.check(style):
                if utils.urls.schema(style):
                    status = 415
                elif style != settings.PLACEHOLDER:
                    status = 422

        else:
            logger.error("No image URL specified for custom template")
            template = models.Template.objects.get("_error")
            style = settings.DEFAULT_STYLE
            status = 422

    else:
        template = models.Template.objects.get_or_none(id)
        if not template or not template.image.exists():
            logger.error(f"No such template: {id}")
            template = models.Template.objects.get("_error")
            if id != settings.PLACEHOLDER:
                status = 404

        style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, "style", "alt")
        if not await template.check(style):
            if utils.urls.schema(style):
                status = 415
            elif style != settings.PLACEHOLDER:
                status = 422

    if extension not in settings.ALLOWED_EXTENSIONS:
        extension = settings.DEFAULT_EXTENSION
        status = 422

    font_name = utils.urls.arg(request.args, "", "font")
    if font_name == settings.PLACEHOLDER:
        font_name = ""
    else:
        try:
            models.Font.objects.get(font_name)
        except ValueError:
            font_name = ""
            status = 422

    try:
        size = int(request.args.get("width", 0)), int(request.args.get("height", 0))
        if 0 < size[0] < 10 or 0 < size[1] < 10:
            raise ValueError(f"dimensions are too small: {size}")
    except ValueError as e:
        logger.error(f"Invalid size: {e}")
        size = 0, 0
        status = 422

    frames = int(request.args.get("frames", 0))

    path = await asyncio.to_thread(
        utils.images.save,
        template,
        lines,
        watermark,
        font_name=font_name,
        extension=extension,
        style=style,
        size=size,
        maximum_frames=frames,
    )
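# --- Minimal wiring sketch (illustrative, not part of the module above) ---
# The handlers take a Sanic `request` plus path arguments, so they can be
# delegated to from routes. The app name, URL paths, and the `views` import
# are assumptions for illustration; render_image is assumed to ultimately
# return a Sanic response, which the excerpt above does not show.
from sanic import Sanic

from . import views  # hypothetical module holding generate_url / render_image

app = Sanic("memegen_sketch")


@app.post("/images")
async def create_image_url(request):
    # generate_url() reads the form or JSON payload, normalizes "text_lines[]"
    # and "style[]", and responds with JSON containing the generated URL.
    return await views.generate_url(request, template_id_required=True)


@app.get("/images/<id:str>/<slug:path>")
async def show_image(request, id, slug):
    # render_image() decodes the slug into text lines and renders the template.
    return await views.render_image(request, id, slug)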
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\",\n \"type\": \"inproject\"\n },\n 
{\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = 
\\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n asyncio.create_task(utils.meta.track(request, lines))\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await 
asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\",\n \"type\": \"common\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n 
asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n logger.error(f\\\"Slug too long: {slug}\\\")\\n slug = slug[:50] + \\\"...\\\"\\n lines = utils.text.decode(slug)\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 414\\n\\n elif id == \\\"custom\\\":\\n url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n if url:\\n template = await models.Template.create(url)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {url}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if url != settings.PLACEHOLDER:\\n status = 415\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n if not utils.urls.schema(style):\\n style = style.lower()\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n else:\\n logger.error(\\\"No image URL specified for custom template\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 422\\n\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template or not template.image.exists():\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if id != settings.PLACEHOLDER:\\n status = 404\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\", \\\"alt\\\")\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n if extension not in settings.ALLOWED_EXTENSIONS:\\n extension = settings.DEFAULT_EXTENSION\\n status = 422\\n\\n font_name = utils.urls.arg(request.args, \\\"\\\", \\\"font\\\")\\n if font_name == settings.PLACEHOLDER:\\n font_name = \\\"\\\"\\n else:\\n try:\\n models.Font.objects.get(font_name)\\n except ValueError:\\n font_name = \\\"\\\"\\n status = 422\\n\\n try:\\n size = int(request.args.get(\\\"width\\\", 0)), int(request.args.get(\\\"height\\\", 0))\\n if 0 < size[0] < 10 or 0 < size[1] < 10:\\n raise ValueError(f\\\"dimensions are too small: {size}\\\")\\n except ValueError as e:\\n logger.error(f\\\"Invalid size: {e}\\\")\\n size = 0, 0\\n status = 422\\n\",\n \"type\": \"common\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\\n\\n style: str = utils.urls.arg(payload, \\\"\\\", \\\"style\\\", \\\"overlay\\\", \\\"alt\\\")\\n if isinstance(style, list):\\n style = \\\",\\\".join([(s.strip() or \\\"default\\\") for s in style])\\n while style.endswith(\\\",default\\\"):\\n style = style.removesuffix(\\\",default\\\")\\n text_lines = utils.urls.arg(payload, [], \\\"text_lines\\\")\\n font = utils.urls.arg(payload, \\\"\\\", \\\"font\\\")\\n background = utils.urls.arg(payload, \\\"\\\", \\\"background\\\", \\\"image_url\\\")\\n extension = utils.urls.arg(payload, \\\"\\\", \\\"extension\\\")\\n\\n if style == \\\"animated\\\":\\n extension = \\\"gif\\\"\\n style = \\\"\\\"\\n\\n status = 201\\n\\n if template_id:\\n template: models.Template = models.Template.objects.get_or_create(template_id)\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n if not template.valid:\\n status = 404\\n template.delete()\\n else:\\n template = models.Template(\\\"_custom\\\")\\n url = template.build_custom_url(\\n request,\\n text_lines,\\n background=background,\\n style=style,\\n font=font,\\n extension=extension,\\n )\\n\\n url, _updated = await utils.meta.tokenize(request, url)\\n\\n if payload.get(\\\"redirect\\\", False):\\n return response.redirect(utils.urls.add(url, status=\\\"201\\\"))\\n\\n return response.json({\\\"url\\\": url}, status=status)\\n\\n\\nasync def preview_image(request, id: str, lines: list[str], style: str):\\n error = \\\"\\\"\\n\\n id = utils.urls.clean(id)\\n if utils.urls.schema(id):\\n template = await models.Template.create(id)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Invalid Background\\\"\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template:\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n error = \\\"Unknown Template\\\"\\n\\n if not any(line.strip() for line in lines):\\n lines = template.example\\n\\n if not utils.urls.schema(style):\\n style = style.strip().lower()\\n if not await template.check(style):\\n error = \\\"Invalid Overlay\\\"\\n\\n data, content_type = await asyncio.to_thread(\\n utils.images.preview, template, lines, style=style, watermark=error.upper()\\n )\\n return response.raw(data, content_type=content_type)\\n\\n\\nasync def render_image(\\n request,\\n id: str,\\n slug: str = \\\"\\\",\\n watermark: str = \\\"\\\",\\n extension: str = settings.DEFAULT_EXTENSION,\\n):\\n lines = utils.text.decode(slug)\\n 
asyncio.create_task(utils.meta.track(request, lines))\\n\\n status = int(utils.urls.arg(request.args, \\\"200\\\", \\\"status\\\"))\\n\\n if any(len(part.encode()) > 200 for part in slug.split(\\\"/\\\")):\\n logger.error(f\\\"Slug too long: {slug}\\\")\\n slug = slug[:50] + \\\"...\\\"\\n lines = utils.text.decode(slug)\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 414\\n\\n elif id == \\\"custom\\\":\\n url = utils.urls.arg(request.args, None, \\\"background\\\", \\\"alt\\\")\\n if url:\\n template = await models.Template.create(url)\\n if not template.image.exists():\\n logger.error(f\\\"Unable to download image URL: {url}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if url != settings.PLACEHOLDER:\\n status = 415\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\")\\n if not utils.urls.schema(style):\\n style = style.lower()\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n else:\\n logger.error(\\\"No image URL specified for custom template\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n style = settings.DEFAULT_STYLE\\n status = 422\\n\\n else:\\n template = models.Template.objects.get_or_none(id)\\n if not template or not template.image.exists():\\n logger.error(f\\\"No such template: {id}\\\")\\n template = models.Template.objects.get(\\\"_error\\\")\\n if id != settings.PLACEHOLDER:\\n status = 404\\n\\n style = utils.urls.arg(request.args, settings.DEFAULT_STYLE, \\\"style\\\", \\\"alt\\\")\\n if not await template.check(style):\\n if utils.urls.schema(style):\\n status = 415\\n elif style != settings.PLACEHOLDER:\\n status = 422\\n\\n if extension not in settings.ALLOWED_EXTENSIONS:\\n extension = settings.DEFAULT_EXTENSION\\n status = 422\\n\\n font_name = utils.urls.arg(request.args, \\\"\\\", \\\"font\\\")\\n if font_name == settings.PLACEHOLDER:\\n font_name = \\\"\\\"\\n else:\\n try:\\n models.Font.objects.get(font_name)\\n except ValueError:\\n font_name = \\\"\\\"\\n status = 422\\n\\n try:\",\n \"type\": \"common\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. import models, settings, utils\\n\\n\\nasync def generate_url(\\n request, template_id: str = \\\"\\\", *, template_id_required: bool = False\\n):\\n if request.form:\\n payload = dict(request.form)\\n for key in list(payload.keys()):\\n if \\\"lines\\\" not in key and \\\"style\\\" not in key:\\n payload[key] = payload.pop(key)[0]\\n else:\\n try:\\n payload = request.json or {}\\n except exceptions.InvalidUsage:\\n payload = {}\\n\\n with suppress(KeyError):\\n payload[\\\"style\\\"] = payload.pop(\\\"style[]\\\")\\n with suppress(KeyError):\\n payload[\\\"text_lines\\\"] = payload.pop(\\\"text_lines[]\\\")\\n\\n if template_id_required:\\n try:\\n template_id = payload[\\\"template_id\\\"]\\n except KeyError:\\n return response.json({\\\"error\\\": '\\\"template_id\\\" is required'}, status=400)\\n else:\\n template_id = utils.text.slugify(template_id)\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"import asyncio\\nfrom contextlib import suppress\\n\\nfrom sanic import exceptions, response\\nfrom sanic.log import logger\\n\\nfrom .. 
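# Hedged sketch of the lookup helper assumed throughout the handlers above:
# utils.urls.arg(mapping, default, *keys) appears to return the value of the
# first listed key present in the mapping, falling back to the default
# otherwise. The stand-in below (arg) only illustrates that calling
# convention; it is not the project's actual implementation.
def arg(mapping, default, *keys):
    for key in keys:
        if key in mapping:
            return mapping[key]
    return default


payload = {"alt": "flip", "font": "impact"}
assert arg(payload, "", "style", "overlay", "alt") == "flip"   # first matching key wins
assert arg(payload, "", "extension") == ""                     # missing keys yield the default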
tests/test_utils_async.py METASEP
"""Tests for `shiny.utils` async-related functions."""

import pytest
import asyncio
from typing import Iterator, List

from shiny.utils import run_coro_sync


def range_sync(n: int) -> Iterator[int]:
    """
    An implementation of `range()` which uses `yield`, but doesn't actually
    give up control to the event loop.
    """
    num = 0
    while num < n:
        yield num
        num += 1


async def make_list_sync(n: int) -> List[int]:
    """
    An `async` function that is in fact synchronous; it does not actually give
    up control.
    """
    x: list[int] = []
    for i in range_sync(n):
        x.append(i)
    return x


async def make_list_async(n: int) -> 
List[int]:\n \"\"\"An `async` function that gives up control.\"\"\"\n x: list[int] = []\n for i in range_sync(n):\n await asyncio.sleep(0)\n x.append(i)\n return x\n\n\ndef test_run_coro_sync():\n # Running a coroutine that is in fact synchronous works fine.\n res = run_coro_sync(make_list_sync(1))\n assert res == [0]\n\n res = run_coro_sync(make_list_sync(3))\n assert res == [0, 1, 2]\n\n # Should error because the asyncio.sleep() gives up control.\n with pytest.raises(RuntimeError):\n run_coro_sync(make_list_async(1))\n\n with pytest.raises(RuntimeError):\n run_coro_sync(make_list_async(3))\n\n # Same with a direct call to asyncio.sleep()\n with pytest.raises(RuntimeError):\n run_coro_sync(asyncio.sleep(0))\n\n with pytest.raises(RuntimeError):\n run_coro_sync(asyncio.sleep(0.1))\n\n\ndef test_run_coro_async():\n async def async_main():\n # awaited calls to the in-fact-synchronous function are OK.\n res = await make_list_sync(3)\n assert res == [0, 1, 2]\n\n # awaited calls to the async function are OK.\n res = await make_list_async(3)\n assert res == [0, 1, 2]\n\n await asyncio.sleep(0)\n\n # Calling run_coro_sync() should be the same as when called normally\n # (from a regular function, not an async function run by asyncio.run()).\n res = run_coro_sync(make_list_sync(3))\n assert res == [0, 1, 2]\n\n with pytest.raises(RuntimeError):\n run_coro_sync(make_list_async(3))\n with pytest.raises(RuntimeError):\n run_coro_sync(asyncio.sleep(0))\n\n asyncio.run(async_main())\n\n\ndef test_run_coro_sync_type_check():\n # Should raise an error if passed a regular generator (as opposed to a\n # coroutine object).\n with pytest.raises(TypeError):\n run_coro_sync(range_sync(0)) # type: ignore\n\n\ndef test_async_generator():\n # run_coro_sync() can't run async generators, but it can run async functions\n # which call async generators.\n\n # An async generator\n async def async_gen_range(n: int):\n for i in range(n):\n yield i\n\n # An async function which uses the generator\n async def main(n: int):\n x: list[int] = []\n async for i in async_gen_range(n):\n x.append(i)\n return x\n\n # Running the async function works fine.\n res = run_coro_sync(main(3))\n assert res == [0, 1, 2]\n\n # Attempting to run the async generator results in an error, because it\n # doesn't return a coroutine object.\n with pytest.raises(TypeError):\n run_coro_sync(async_gen_range(3)) # type: ignore\n\n\ndef test_create_task():\n # Should be OK to call create_task().\n async def create_task_wrapper():\n async def inner():\n asyncio.create_task(make_list_async(3))\n\n run_coro_sync(inner())\n\n asyncio.run(create_task_wrapper())\n\n # Should not be OK to await a task, because it doesn't complete immediately.\n async def create_task_wrapper2():\n async def inner():\n await asyncio.create_task(make_list_async(3))\n\n run_coro_sync(inner())\n\n with pytest.raises(RuntimeError):\n asyncio.run(create_task_wrapper2())\n\ntests/test_shinysession.py METASEP\n\"\"\"Tests for `shiny.shinysession`.\"\"\"\n\nimport pytest\n\nfrom shiny import *\n\n\ndef test_require_active_session_error_messages():\n # _require_active_session() should report the caller's name when an error occurs.\n with pytest.raises(RuntimeError, match=r\"Progress\\(\\) must be called\"):\n Progress()\n\n with pytest.raises(RuntimeError, match=r\"notification_remove\\(\\) must be called.*\"):\n notification_remove(\"abc\")\n\ntests/test_reactives.py METASEP\n\"\"\"Tests for `shiny.reactives` and `shiny.reactcore`.\"\"\"\n\nimport pytest\nimport asyncio\n\nimport 
shiny.reactcore as reactcore\nfrom shiny.reactives import *\n\n\ndef test_flush_runs_newly_invalidated():\n \"\"\"\n Make sure that a flush will also run any reactives that were invalidated\n during the flush.\n \"\"\"\n\n v1 = ReactiveVal(1)\n v2 = ReactiveVal(2)\n\n v2_result = None\n # In practice, on the first flush, Observers run in the order that they were\n # created. Our test checks that o2 runs _after_ o1.\n @observe()\n def o2():\n nonlocal v2_result\n v2_result = v2()\n\n @observe()\n def o1():\n v2(v1())\n\n asyncio.run(reactcore.flush())\n assert v2_result == 1\n assert o2._exec_count == 2\n assert o1._exec_count == 1\n\n\ndef test_flush_runs_newly_invalidated_async():\n \"\"\"\n Make sure that a flush will also run any reactives that were invalidated\n during the flush. (Same as previous test, but async.)\n \"\"\"\n\n v1 = ReactiveVal(1)\n v2 = ReactiveVal(2)\n\n v2_result = None\n # In practice, on the first flush, Observers run in the order that they were\n # created. Our test checks that o2 runs _after_ o1.\n @observe_async()\n async def o2():\n nonlocal v2_result\n v2_result = v2()\n\n @observe_async()\n async def o1():\n v2(v1())\n\n asyncio.run(reactcore.flush())\n assert v2_result == 1\n assert o2._exec_count == 2\n assert o1._exec_count == 1\n\n\n# ======================================================================\n# Setting ReactiveVal to same value doesn't invalidate downstream\n# ======================================================================\ndef test_reactive_val_same_no_invalidate():\n v = ReactiveVal(1)\n\n @observe()\n def o():\n v()\n\n asyncio.run(reactcore.flush())\n assert o._exec_count == 1\n\n v(1)\n asyncio.run(reactcore.flush())\n assert o._exec_count == 1\n\n\ntest_reactive_val_same_no_invalidate()\n\n# ======================================================================\n# Recursive calls to reactives\n# ======================================================================\ndef test_recursive_reactive():\n v = ReactiveVal(5)\n\n @reactive()\n def r():\n if v() == 0:\n return 0\n v(v() - 1)\n r()\n\n @observe()\n def o():\n r()\n\n asyncio.run(reactcore.flush())\n assert o._exec_count == 2\n assert r._exec_count == 6\n assert isolate(v) == 0\n\n\ndef test_recursive_reactive_async():\n v = ReactiveVal(5)\n\n @reactive_async()\n async def r():\n if v() == 0:\n return 0\n v(v() - 1)\n await r()\n\n @observe_async()\n async def o():\n await r()\n\n asyncio.run(reactcore.flush())\n assert o._exec_count == 2\n assert r._exec_count == 6\n assert isolate(v) == 0\n\n\n# ======================================================================\n# Concurrent/sequential async\n# ======================================================================\ndef test_async_concurrent():\n x: ReactiveVal[int] = ReactiveVal(1)\n results: list[int] = []\n exec_order: list[str] = []\n\n async def react_chain(n: int):\n @reactive_async()\n async def r():\n nonlocal exec_order\n exec_order.append(f\"r{n}-1\")\n await asyncio.sleep(0)\n exec_order.append(f\"r{n}-2\")\n return x() + 10\n\n @observe_async()\n async def _():\n nonlocal exec_order\n exec_order.append(f\"o{n}-1\")\n await asyncio.sleep(0)\n exec_order.append(f\"o{n}-2\")\n val = await r()\n exec_order.append(f\"o{n}-3\")\n results.append(val + n * 100)\n\n async def go():\n await asyncio.gather(react_chain(1), react_chain(2))\n\n await reactcore.flush()\n\n x(5)\n await reactcore.flush()\n\n asyncio.run(go())\n\n assert results == [111, 211, 115, 215]\n\n # fmt: off\n # This is the order of execution if 
async observers are run with separate\n # (interleaved) tasks. When it hits an `asyncio.sleep(0)`, it will yield\n # control and then the other observer in the other task will run.\n assert exec_order == [\n 'o1-1', 'o2-1',\n 'o1-2', 'o2-2',\n 'r1-1', 'r2-1',\n 'r1-2', 'r2-2',\n 'o1-3', 'o2-3',\n 'o1-1', 'o2-1',\n 'o1-2', 'o2-2',\n 'r1-1', 'r2-1',\n 'r1-2', 'r2-2',\n 'o1-3', 'o2-3'\n ]\n # fmt: on\n\n\ndef test_async_sequential():\n # Same as previous, but with a sequential flush, as in\n # `flush(concurrent=False)`.\n x: ReactiveVal[int] = ReactiveVal(1)\n results: list[int] = []\n exec_order: list[str] = []\n\n async def react_chain(n: int):\n @reactive_async()\n async def r():\n nonlocal exec_order\n exec_order.append(f\"r{n}-1\")\n await asyncio.sleep(0)\n exec_order.append(f\"r{n}-2\")\n return x() + 10\n\n @observe_async()\n async def _():\n nonlocal exec_order\n exec_order.append(f\"o{n}-1\")\n await asyncio.sleep(0)\n exec_order.append(f\"o{n}-2\")\n val = await r()\n exec_order.append(f\"o{n}-3\")\n results.append(val + n * 100)\n\n async def go():\n await asyncio.gather(react_chain(1), react_chain(2))\n\n await reactcore.flush(concurrent=False)\n\n x(5)\n await reactcore.flush(concurrent=False)\n\n asyncio.run(go())\n\n assert results == [111, 211, 115, 215]\n\n # This is the order of execution if the async observers are run\n # sequentially. The `asyncio.sleep(0)` still yields control, but since there\n # are no other observers scheduled, it will simply resume at the same point.\n # fmt: off\n assert exec_order == [\n 'o1-1', 'o1-2', 'r1-1', 'r1-2', 'o1-3',\n 'o2-1', 'o2-2', 'r2-1', 'r2-2', 'o2-3',\n 'o1-1', 'o1-2', 'r1-1', 'r1-2', 'o1-3',\n 'o2-1', 'o2-2', 'r2-1', 'r2-2', 'o2-3'\n ]\n # fmt: on\n\n\n# ======================================================================\n# isolate()\n# ======================================================================\ndef test_isolate_basic_value():\n # isolate() returns basic value\n assert isolate(lambda: 123) == 123\n assert isolate(lambda: None) is None\n\n\ndef test_isolate_basic_without_context():\n # isolate() works with Reactive and ReactiveVal; allows executing without a\n # reactive context.\n v = ReactiveVal(1)\n\n @reactive()\n def r():\n return v() + 10\n\n def get_r():\n return r()\n\n assert isolate(lambda: v()) == 1\n assert isolate(v) == 1\n assert isolate(lambda: r()) == 11\n assert isolate(r) == 11\n assert isolate(get_r) == 11\n\n\ndef test_isolate_prevents_dependency():\n v = ReactiveVal(1)\n\n @reactive()\n def r():\n return v() + 10\n\n v_dep = ReactiveVal(1) # Use this only for invalidating the observer\n o_val = None\n\n @observe()\n def o():\n nonlocal o_val\n v_dep()\n o_val = isolate(lambda: r())\n\n asyncio.run(reactcore.flush())\n assert o_val == 11\n\n # Changing v() shouldn't invalidate o\n v(2)\n asyncio.run(reactcore.flush())\n assert o_val == 11\n assert o._exec_count == 1\n\n # v_dep() should invalidate the observer\n v_dep(2)\n asyncio.run(reactcore.flush())\n assert o_val == 12\n assert o._exec_count == 2\n\n\n# ======================================================================\n# isolate_async()\n# ======================================================================\ndef test_isolate_async_basic_value():\n async def f():\n return 123\n\n async def go():\n assert await isolate_async(f) == 123\n\n asyncio.run(go())\n\n\ndef test_isolate_async_basic_without_context():\n # isolate_async() works with Reactive and ReactiveVal; allows executing\n # without a reactive context.\n v = ReactiveVal(1)\n\n 
@reactive_async()\n async def r():\n return v() + 10\n\n async def get_r():\n return await r()\n\n async def go():\n assert await isolate_async(r) == 11\n assert await isolate_async(get_r) == 11\n\n asyncio.run(go())\n\n\ndef test_isolate_async_prevents_dependency():\n v = ReactiveVal(1)\n\n @reactive_async()\n async def r():\n return v() + 10\n\n v_dep = ReactiveVal(1) # Use this only for invalidating the observer\n o_val = None\n\n @observe_async()\n async def o():\n nonlocal o_val\n v_dep()\n o_val = await isolate_async(r)\n\n asyncio.run(reactcore.flush())\n assert o_val == 11\n\n # Changing v() shouldn't invalidate o\n v(2)\n asyncio.run(reactcore.flush())\n assert o_val == 11\n assert o._exec_count == 1\n\n # v_dep() should invalidate the observer\n v_dep(2)\n asyncio.run(reactcore.flush())\n assert o_val == 12\n assert o._exec_count == 2\n\n\n# ======================================================================\n# Priority for observers\n# ======================================================================\ndef test_observer_priority():\n v = ReactiveVal(1)\n results: list[int] = []\n\n @observe(priority=1)\n def o1():\n nonlocal results\n v()\n results.append(1)\n\n @observe(priority=2)\n def o2():\n nonlocal results\n v()\n results.append(2)\n\n @observe(priority=1)\n def o3():\n nonlocal results\n v()\n results.append(3)\n\n asyncio.run(reactcore.flush())\n assert results == [2, 1, 3]\n\n # Add another observer with priority 2. Only this one will run (until we\n # invalidate others by changing v).\n @observe(priority=2)\n def o4():\n nonlocal results\n v()\n results.append(4)\n\n results.clear()\n asyncio.run(reactcore.flush())\n assert results == [4]\n\n # Change v and run again, to make sure results are stable\n results.clear()\n v(2)\n asyncio.run(reactcore.flush())\n assert results == [2, 4, 1, 3]\n\n results.clear()\n v(3)\n asyncio.run(reactcore.flush())\n assert results == [2, 4, 1, 3]\n\n\n# Same as previous, but with async\ndef test_observer_async_priority():\n v = ReactiveVal(1)\n results: list[int] = []\n\n @observe_async(priority=1)\n async def o1():\n nonlocal results\n v()\n results.append(1)\n\n @observe_async(priority=2)\n async def o2():\n nonlocal results\n v()\n results.append(2)\n\n @observe_async(priority=1)\n async def o3():\n nonlocal results\n v()\n results.append(3)\n\n asyncio.run(reactcore.flush())\n assert results == [2, 1, 3]\n\n # Add another observer with priority 2. 
Only this one will run (until we\n # invalidate others by changing v).\n @observe_async(priority=2)\n async def o4():\n nonlocal results\n v()\n results.append(4)\n\n results.clear()\n asyncio.run(reactcore.flush())\n assert results == [4]\n\n # Change v and run again, to make sure results are stable\n results.clear()\n v(2)\n asyncio.run(reactcore.flush())\n assert results == [2, 4, 1, 3]\n\n results.clear()\n v(3)\n asyncio.run(reactcore.flush())\n assert results == [2, 4, 1, 3]\n\n\n# ======================================================================\n# Destroying observers\n# ======================================================================\ndef test_observer_destroy():\n v = ReactiveVal(1)\n results: list[int] = []\n\n @observe()\n def o1():\n nonlocal results\n v()\n results.append(1)\n\n asyncio.run(reactcore.flush())\n assert results == [1]\n\n v(2)\n o1.destroy()\n asyncio.run(reactcore.flush())\n assert results == [1]\n\n # Same as above, but destroy before running first time\n v = ReactiveVal(1)\n results: list[int] = []\n\n @observe()\n def o2():\n nonlocal results\n v()\n results.append(1)\n\n o2.destroy()\n asyncio.run(reactcore.flush())\n assert results == []\n\ntests/test_datastructures.py METASEP\n\"\"\"Tests for `shiny.datastructures`.\"\"\"\n\nfrom shiny.datastructures import PriorityQueueFIFO\n\n\ndef test_priority_queue_fifo():\n q: PriorityQueueFIFO[str] = PriorityQueueFIFO()\n\n # The random-seeming items are here to ensure that the value of the items\n # do not affect the order that they go into the queue.\n q.put(1, \"9\")\n q.put(1, \"8\")\n q.put(2, \"6\")\n q.put(2, \"7\")\n\n assert q.get() == \"6\"\n assert q.get() == \"7\"\n assert q.get() == \"9\"\n assert q.get() == \"8\"\n\ntests/__init__.py METASEP\n\"\"\"Unit test package for shiny.\"\"\"\n\nshiny/utils.py METASEP\nfrom typing import (\n TYPE_CHECKING,\n Callable,\n Awaitable,\n TypeVar,\n Optional,\n List,\n Dict,\n Any,\n)\nimport os\nimport tempfile\nimport importlib\nimport inspect\nimport secrets\n\nfrom htmltools import TagList, TagChildArg\n\n# ==============================================================================\n# Misc utility functions\n# ==============================================================================\ndef rand_hex(bytes: int) -> str:\n \"\"\"\n Creates a random hexadecimal string of size `bytes`. 
The length in\n characters will be bytes*2.\n \"\"\"\n format_str = \"{{:0{}x}}\".format(bytes * 2)\n return format_str.format(secrets.randbits(bytes * 8))\n\n\n# ==============================================================================\n# Async-related functions\n# ==============================================================================\n\nT = TypeVar(\"T\")\n\n\ndef wrap_async(fn: Callable[[], T]) -> Callable[[], Awaitable[T]]:\n \"\"\"\n Wrap a synchronous function that returns T, and return an async function\n that wraps the original function.\n \"\"\"\n\n async def fn_async() -> T:\n return fn()\n\n return fn_async\n\n\ndef is_async_callable(obj: object) -> bool:\n \"\"\"\n Returns True if `obj` is an `async def` function, or if it's an object with\n a `__call__` method which is an `async def` function.\n \"\"\"\n if inspect.iscoroutinefunction(obj):\n return True\n if hasattr(obj, \"__call__\"):\n if inspect.iscoroutinefunction(obj.__call__): # type: ignore\n return True\n\n return False\n\n\n# See https://stackoverflow.com/a/59780868/412655 for an excellent explanation\n# of how this stuff works.\n# For a more in-depth explanation, see\n# https://snarky.ca/how-the-heck-does-async-await-work-in-python-3-5/.\ndef run_coro_sync(coro: Awaitable[T]) -> T:\n \"\"\"\n Run a coroutine that is in fact synchronous. Given a coroutine (which is\n returned by calling an `async def` function), this function will run the\n coroutine for one iteration. If the coroutine completes, then return the\n value. If it does not complete, then it will throw a `RuntimeError`.\n\n What it means to be \"in fact synchronous\": the coroutine must not yield\n control to the event loop. A coroutine may have an `await` expression in it,\n and that may call another function that has an `await`, but the chain will\n only yield control if a `yield` statement bubbles through `await`s all the\n way up. For example, `await asyncio.sleep(0)` will have a `yield` which\n bubbles up to the next level. 
Note that a `yield` in a generator used the\n regular way (not with `await`) will not bubble up, since it is not awaited\n on.\n \"\"\"\n if not inspect.iscoroutine(coro):\n raise TypeError(\"run_coro_sync requires a Coroutine object.\")\n\n try:\n coro.send(None)\n except StopIteration as e:\n return e.value\n\n raise RuntimeError(\n \"async function yielded control; it did not finish in one iteration.\"\n )\n\n\n# ==============================================================================\n# System-related functions\n# ==============================================================================\n\n# Return directory that a package lives in.\ndef package_dir(package: str) -> str:\n with tempfile.TemporaryDirectory():\n pkg_file = importlib.import_module(\".\", package=package).__file__\n return os.path.dirname(pkg_file)\n\nshiny/types.py METASEP\n# Sentinel value - indicates a missing value in a function call.\nclass MISSING_TYPE:\n pass\n\n\nMISSING = MISSING_TYPE()\n\nshiny/shinysession.py METASEP\n__all__ = (\n \"ShinySession\",\n \"Outputs\",\n \"get_current_session\",\n \"session_context\",\n)\n\nimport sys\nimport json\nimport re\nimport asyncio\nimport warnings\nimport typing\nimport mimetypes\nfrom contextvars import ContextVar, Token\nfrom contextlib import contextmanager\nfrom typing import (\n TYPE_CHECKING,\n Callable,\n Optional,\n Union,\n Awaitable,\n Dict,\n List,\n Any,\n)\nfrom starlette.requests import Request\n\nfrom starlette.responses import Response, HTMLResponse, PlainTextResponse\n\nif sys.version_info >= (3, 8):\n from typing import TypedDict\nelse:\n from typing_extensions import TypedDict\n\nif TYPE_CHECKING:\n from .shinyapp import ShinyApp\n\nfrom htmltools import TagChildArg, TagList, HTMLDependency\n\nfrom .reactives import ReactiveValues, Observer, ObserverAsync\nfrom .connmanager import Connection, ConnectionClosed\nfrom . import render\nfrom . 
import utils\nfrom .fileupload import FileInfo, FileUploadManager\nfrom .input_handlers import input_handlers\n\n# This cast is necessary because if the type checker thinks that if\n# \"tag\" isn't in `message`, then it's not a ClientMessage object.\n# This will be fixable when TypedDict items can be marked as\n# potentially missing, in Python 3.10, with PEP 655.\nclass ClientMessage(TypedDict):\n method: str\n\n\nclass ClientMessageInit(ClientMessage):\n data: Dict[str, object]\n\n\nclass ClientMessageUpdate(ClientMessage):\n data: Dict[str, object]\n\n\n# For messages where \"method\" is something other than \"init\" or \"update\".\nclass ClientMessageOther(ClientMessage):\n args: List[object]\n tag: int\n\n\nclass ShinySession:\n # ==========================================================================\n # Initialization\n # ==========================================================================\n def __init__(\n self, app: \"ShinyApp\", id: str, conn: Connection, debug: bool = False\n ) -> None:\n self.app: ShinyApp = app\n self.id: str = id\n self._conn: Connection = conn\n self._debug: bool = debug\n\n self.input: ReactiveValues = ReactiveValues()\n self.output: Outputs = Outputs(self)\n\n self._message_queue_in: asyncio.Queue[Optional[ClientMessage]] = asyncio.Queue()\n self._message_queue_out: List[Dict[str, object]] = []\n\n self._message_handlers: Dict[\n str, Callable[..., Awaitable[object]]\n ] = self._create_message_handlers()\n self._file_upload_manager: FileUploadManager = FileUploadManager()\n self._on_ended_callbacks: List[Callable[[], None]] = []\n self._has_run_session_end_tasks: bool = False\n\n self._register_session_end_callbacks()\n\n with session_context(self):\n self.app.server(self)\n\n def _register_session_end_callbacks(self) -> None:\n # This is to be called from the initialization. 
It registers functions\n # that are called when a session ends.\n\n # Clear file upload directories, if present\n self._on_ended_callbacks.append(self._file_upload_manager.rm_upload_dir)\n\n def _run_session_end_tasks(self) -> None:\n if self._has_run_session_end_tasks:\n return\n self._has_run_session_end_tasks = True\n\n for cb in self._on_ended_callbacks:\n try:\n cb()\n except Exception as e:\n print(\"Error in session on_ended callback: \" + str(e))\n\n self.app.remove_session(self)\n\n async def close(self, code: int = 1001) -> None:\n await self._conn.close(code, None)\n self._run_session_end_tasks()\n\n async def run(self) -> None:\n await self.send_message(\n {\"config\": {\"workerId\": \"\", \"sessionId\": str(self.id), \"user\": None}}\n )\n\n try:\n while True:\n message: str = await self._conn.receive()\n if self._debug:\n print(\"RECV: \" + message)\n\n try:\n message_obj = json.loads(message)\n except json.JSONDecodeError:\n print(\"ERROR: Invalid JSON message\")\n continue\n\n if \"method\" not in message_obj:\n self._send_error_response(\"Message does not contain 'method'.\")\n return\n\n if message_obj[\"method\"] == \"init\":\n message_obj = typing.cast(ClientMessageInit, message_obj)\n self._manage_inputs(message_obj[\"data\"])\n\n elif message_obj[\"method\"] == \"update\":\n message_obj = typing.cast(ClientMessageUpdate, message_obj)\n self._manage_inputs(message_obj[\"data\"])\n\n else:\n if \"tag\" not in message_obj:\n warnings.warn(\n \"Cannot dispatch message with missing 'tag'; method: \"\n + message_obj[\"method\"]\n )\n return\n if \"args\" not in message_obj:\n warnings.warn(\n \"Cannot dispatch message with missing 'args'; method: \"\n + message_obj[\"method\"]\n )\n return\n\n message_obj = typing.cast(ClientMessageOther, message_obj)\n await self._dispatch(message_obj)\n\n self.request_flush()\n\n await self.app.flush_pending_sessions()\n\n except ConnectionClosed:\n self._run_session_end_tasks()\n\n def _manage_inputs(self, data: Dict[str, object]) -> None:\n for (key, val) in data.items():\n keys = key.split(\":\")\n if len(keys) > 2:\n raise ValueError(\n \"Input name+type is not allowed to contain more than one ':' -- \"\n + key\n )\n if len(keys) == 2:\n val = input_handlers.process_value(keys[1], val, keys[0], self)\n\n self.input[keys[0]] = val\n\n # ==========================================================================\n # Message handlers\n # ==========================================================================\n\n async def _dispatch(self, message: ClientMessageOther) -> None:\n try:\n func = self._message_handlers[message[\"method\"]]\n except AttributeError:\n self._send_error_response(\"Unknown method: \" + message[\"method\"])\n return\n\n try:\n # TODO: handle `blobs`\n value: object = await func(*message[\"args\"])\n except Exception as e:\n self._send_error_response(\"Error: \" + str(e))\n return\n\n await self._send_response(message, value)\n\n async def _send_response(self, message: ClientMessageOther, value: object) -> None:\n await self.send_message({\"response\": {\"tag\": message[\"tag\"], \"value\": value}})\n\n # This is called during __init__.\n def _create_message_handlers(self) -> Dict[str, Callable[..., Awaitable[object]]]:\n async def uploadInit(file_infos: List[FileInfo]) -> Dict[str, object]:\n with session_context(self):\n if self._debug:\n print(\"Upload init: \" + str(file_infos))\n\n # TODO: Don't alter message in place?\n for fi in file_infos:\n if fi[\"type\"] == \"\":\n type = 
mimetypes.guess_type(fi[\"name\"])[0]\n fi[\"type\"] = type if type else \"application/octet-stream\"\n\n job_id = self._file_upload_manager.create_upload_operation(file_infos)\n worker_id = \"\"\n return {\n \"jobId\": job_id,\n \"uploadUrl\": f\"session/{self.id}/upload/{job_id}?w={worker_id}\",\n }\n\n async def uploadEnd(job_id: str, input_id: str) -> None:\n upload_op = self._file_upload_manager.get_upload_operation(job_id)\n if upload_op is None:\n warnings.warn(\n \"Received uploadEnd message for non-existent upload operation.\"\n )\n return None\n file_data = upload_op.finish()\n self.input[input_id] = file_data\n # Explicitly return None to signal that the message was handled.\n return None\n\n return {\n \"uploadInit\": uploadInit,\n \"uploadEnd\": uploadEnd,\n }\n\n # ==========================================================================\n # Handling /session/{session_id}/{subpath} requests\n # ==========================================================================\n async def handle_request(self, request: Request) -> Response:\n subpath: str = request.path_params[\"subpath\"] # type: ignore\n matches = re.search(\"^([a-z]+)/(.*)$\", subpath)\n\n if not matches:\n return HTMLResponse(\"

<html><body>Bad Request</body></html>

\", 400)\n\n if matches[1] == \"upload\" and request.method == \"POST\":\n # check that upload operation exists\n job_id = matches[2]\n upload_op = self._file_upload_manager.get_upload_operation(job_id)\n if not upload_op:\n return HTMLResponse(\"

<html><body>Bad Request</body></html>

\", 400)\n\n # The FileUploadOperation can have multiple files; each one will\n # have a separate POST request. Each call to `with upload_op` will\n # open up each file (in sequence) for writing.\n with upload_op:\n async for chunk in request.stream():\n upload_op.write_chunk(chunk)\n\n return PlainTextResponse(\"OK\", 200)\n\n return HTMLResponse(\"

<html><body>Not Found</body></html>

\", 404)\n\n # ==========================================================================\n # Outbound message handling\n # ==========================================================================\n def add_message_out(self, message: Dict[str, object]) -> None:\n self._message_queue_out.append(message)\n\n def get_messages_out(self) -> List[Dict[str, object]]:\n return self._message_queue_out\n\n def clear_messages_out(self) -> None:\n self._message_queue_out.clear()\n\n async def send_message(self, message: Dict[str, object]) -> None:\n message_str: str = json.dumps(message) + \"\\n\"\n if self._debug:\n print(\n \"SEND: \"\n + re.sub(\"(?m)base64,[a-zA-Z0-9+/=]+\", \"[base64 data]\", message_str),\n end=\"\",\n )\n await self._conn.send(json.dumps(message))\n\n def _send_error_response(self, message_str: str) -> None:\n print(\"_send_error_response: \" + message_str)\n pass\n\n # ==========================================================================\n # Flush\n # ==========================================================================\n def request_flush(self) -> None:\n self.app.request_flush(self)\n\n async def flush(self) -> None:\n values: Dict[str, object] = {}\n\n for value in self.get_messages_out():\n values.update(value)\n\n message: Dict[str, object] = {\n \"errors\": {},\n \"values\": values,\n \"inputMessages\": [],\n }\n\n try:\n await self.send_message(message)\n finally:\n self.clear_messages_out()\n\n # ==========================================================================\n # On session ended\n # ==========================================================================\n def on_ended(self, cb: Callable[[], None]) -> None:\n self._on_ended_callbacks.append(cb)\n\n # ==========================================================================\n # Misc\n # ==========================================================================\n async def unhandled_error(self, e: Exception) -> None:\n print(\"Unhandled error: \" + str(e))\n await self.close()\n\n\nclass Outputs:\n def __init__(self, session: ShinySession) -> None:\n self._output_obervers: Dict[str, Observer] = {}\n self._session: ShinySession = session\n\n def __call__(\n self, name: str\n ) -> Callable[[Union[Callable[[], object], render.RenderFunction]], None]:\n def set_fn(fn: Union[Callable[[], object], render.RenderFunction]) -> None:\n\n # fn is either a regular function or a RenderFunction object. 
If\n # it's the latter, we can give it a bit of metadata, which can be\n # used by the\n if isinstance(fn, render.RenderFunction):\n fn.set_metadata(self._session, name)\n\n if name in self._output_obervers:\n self._output_obervers[name].destroy()\n\n @ObserverAsync\n async def output_obs():\n await self._session.send_message(\n {\"recalculating\": {\"name\": name, \"status\": \"recalculating\"}}\n )\n\n message: Dict[str, object] = {}\n if utils.is_async_callable(fn):\n fn2 = typing.cast(Callable[[], Awaitable[object]], fn)\n val = await fn2()\n else:\n val = fn()\n message[name] = val\n self._session.add_message_out(message)\n\n await self._session.send_message(\n {\"recalculating\": {\"name\": name, \"status\": \"recalculated\"}}\n )\n\n self._output_obervers[name] = output_obs\n\n return None\n\n return set_fn\n\n\n# ==============================================================================\n# Context manager for current session (AKA current reactive domain)\n# ==============================================================================\n_current_session: ContextVar[Optional[ShinySession]] = ContextVar(\n \"current_session\", default=None\n)\n\n\ndef get_current_session() -> Optional[ShinySession]:\n return _current_session.get()\n\n\n@contextmanager\ndef session_context(session: Optional[ShinySession]):\n token: Token[Union[ShinySession, None]] = _current_session.set(session)\n try:\n yield\n finally:\n _current_session.reset(token)\n\n\ndef _require_active_session(session: Optional[ShinySession]) -> ShinySession:\n if session is None:\n session = get_current_session()\n if session is None:\n import inspect\n\n call_stack = inspect.stack()\n if len(call_stack) > 1:\n caller = call_stack[1]\n else:\n # Uncommon case: this function is called from the top-level, so the caller\n # is just _require_active_session.\n caller = call_stack[0]\n\n calling_fn_name = caller.function\n if calling_fn_name == \"__init__\":\n # If the caller is __init__, then we're most likely in the initialization of\n # an object. 
This will get the class name.\n calling_fn_name = caller.frame.f_locals[\"self\"].__class__.__name__\n\n raise RuntimeError(\n f\"{calling_fn_name}() must be called from within an active Shiny session.\"\n )\n return session\n\n\n# ==============================================================================\n# Miscellaneous functions\n# ==============================================================================\n\n\nclass _RenderedDeps(TypedDict):\n deps: List[Dict[str, Any]]\n html: str\n\n\ndef _process_deps(\n ui: TagChildArg, session: Optional[ShinySession] = None\n) -> _RenderedDeps:\n\n session = _require_active_session(session)\n\n res = TagList(ui).render()\n deps: List[Dict[str, Any]] = []\n for dep in res[\"dependencies\"]:\n session.app.register_web_dependency(dep)\n dep_dict = dep.as_dict(lib_prefix=session.app.LIB_PREFIX)\n deps.append(dep_dict)\n\n return {\"deps\": deps, \"html\": res[\"html\"]}\n\nshiny/shinymodule.py METASEP\n__all__ = (\n \"ReactiveValuesProxy\",\n \"OutputsProxy\",\n \"ShinySessionProxy\",\n \"ShinyModule\",\n)\n\nfrom typing import Optional, Union, Callable, Any\n\nfrom htmltools.core import TagChildArg\n\nfrom .shinysession import ShinySession, Outputs, _require_active_session\nfrom .reactives import ReactiveValues\nfrom .render import RenderFunction\n\n\nclass ReactiveValuesProxy(ReactiveValues):\n def __init__(self, ns: str, values: ReactiveValues):\n self._ns: str = ns\n self._values: ReactiveValues = values\n\n def _ns_key(self, key: str) -> str:\n return self._ns + \"-\" + key\n\n def __setitem__(self, key: str, value: object) -> None:\n self._values[self._ns_key(key)] = value\n\n def __getitem__(self, key: str) -> object:\n return self._values[self._ns_key(key)]\n\n def __delitem__(self, key: str) -> None:\n del self._values[self._ns_key(key)]\n\n\nclass OutputsProxy(Outputs):\n def __init__(self, ns: str, outputs: Outputs):\n self._ns: str = ns\n self._outputs: Outputs = outputs\n\n def _ns_key(self, key: str) -> str:\n return self._ns + \"-\" + key\n\n def __call__(\n self, name: str\n ) -> Callable[[Union[Callable[[], object], RenderFunction]], None]:\n return self._outputs(self._ns_key(name))\n\n\nclass ShinySessionProxy(ShinySession):\n def __init__(self, ns: str, parent_session: ShinySession) -> None:\n self._ns: str = ns\n self._parent: ShinySession = parent_session\n self.input: ReactiveValuesProxy = ReactiveValuesProxy(ns, parent_session.input)\n self.output: OutputsProxy = OutputsProxy(ns, parent_session.output)\n\n\nclass ShinyModule:\n def __init__(\n self,\n ui: Callable[..., TagChildArg],\n server: Callable[[ShinySessionProxy], None],\n ) -> None:\n self._ui: Callable[..., TagChildArg] = ui\n self._server: Callable[[ShinySessionProxy], None] = server\n\n def ui(self, namespace: str, *args: Any) -> TagChildArg:\n ns = ShinyModule._make_ns_fn(namespace)\n return self._ui(ns, *args)\n\n def server(self, ns: str, *, session: Optional[ShinySession] = None) -> None:\n self.ns: str = ns\n session = _require_active_session(session)\n session_proxy = ShinySessionProxy(ns, session)\n self._server(session_proxy)\n\n @staticmethod\n def _make_ns_fn(namespace: str) -> Callable[[str], str]:\n def ns_fn(id: str) -> str:\n return namespace + \"-\" + id\n\n return ns_fn\n\nshiny/shinyapp.py METASEP\n__all__ = (\"ShinyApp\",)\n\nfrom typing import Any, List, Optional, Union, Dict, Callable, cast\n\nfrom htmltools import Tag, TagList, HTMLDocument, HTMLDependency, RenderedHTML\n\nimport starlette.routing\nimport starlette.websockets\nfrom 
starlette.types import Message, Receive, Scope, Send\nfrom starlette.requests import Request\nfrom starlette.responses import Response, HTMLResponse, JSONResponse\n\nfrom .http_staticfiles import StaticFiles\nfrom .shinysession import ShinySession, session_context\nfrom . import reactcore\nfrom .connmanager import (\n Connection,\n StarletteConnection,\n)\nfrom .html_dependencies import jquery_deps, shiny_deps\n\n\nclass ShinyApp:\n LIB_PREFIX = \"lib/\"\n\n def __init__(\n self,\n ui: Union[Tag, TagList],\n server: Callable[[ShinySession], None],\n *,\n debug: bool = False,\n ) -> None:\n self.ui: RenderedHTML = _render_page(ui, lib_prefix=self.LIB_PREFIX)\n self.server: Callable[[ShinySession], None] = server\n\n self._debug: bool = debug\n\n self._sessions: Dict[str, ShinySession] = {}\n self._last_session_id: int = 0 # Counter for generating session IDs\n\n self._sessions_needing_flush: Dict[int, ShinySession] = {}\n\n self._registered_dependencies: Dict[str, HTMLDependency] = {}\n self._dependency_handler: Any = starlette.routing.Router()\n\n self.starlette_app = starlette.routing.Router(\n routes=[\n starlette.routing.WebSocketRoute(\"/websocket/\", self._on_connect_cb),\n starlette.routing.Route(\"/\", self._on_root_request_cb, methods=[\"GET\"]),\n starlette.routing.Route(\n \"/session/{session_id}/{subpath:path}\",\n self._on_session_request_cb,\n methods=[\"GET\", \"POST\"],\n ),\n starlette.routing.Mount(\"/\", app=self._dependency_handler),\n ]\n )\n\n def create_session(self, conn: Connection) -> ShinySession:\n self._last_session_id += 1\n id = str(self._last_session_id)\n session = ShinySession(self, id, conn, debug=self._debug)\n self._sessions[id] = session\n return session\n\n def remove_session(self, session: Union[ShinySession, str]) -> None:\n if isinstance(session, ShinySession):\n session = session.id\n\n if self._debug:\n print(f\"remove_session: {session}\")\n del self._sessions[session]\n\n def run(self, debug: Optional[bool] = None) -> None:\n import uvicorn # type: ignore\n\n if debug is not None:\n self._debug = debug\n uvicorn.run(cast(Any, self), host=\"0.0.0.0\", port=8000)\n\n # ASGI entrypoint. Handles HTTP, WebSocket, and lifespan.\n async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:\n await self.starlette_app(scope, receive, send)\n\n async def call_pyodide(self, scope: Scope, receive: Receive, send: Send) -> None:\n # TODO: Pretty sure there are objects that need to be destroy()'d here?\n scope = cast(Any, scope).to_py()\n\n # ASGI requires some values to be byte strings, not character strings. 
Those are\n # not that easy to create in JavaScript, so we let the JS side pass us strings\n # and we convert them to bytes here.\n if \"headers\" in scope:\n # JS doesn't have `bytes` so we pass as strings and convert here\n scope[\"headers\"] = [\n [value.encode(\"latin-1\") for value in header]\n for header in scope[\"headers\"]\n ]\n if \"query_string\" in scope and scope[\"query_string\"]:\n scope[\"query_string\"] = scope[\"query_string\"].encode(\"latin-1\")\n if \"raw_path\" in scope and scope[\"raw_path\"]:\n scope[\"raw_path\"] = scope[\"raw_path\"].encode(\"latin-1\")\n\n async def rcv() -> Message:\n event = await receive()\n return cast(Message, cast(Any, event).to_py())\n\n async def snd(event: Message):\n await send(event)\n\n await self(scope, rcv, snd)\n\n async def stop(self) -> None:\n # Close all sessions (convert to list to avoid modifying the dict while\n # iterating over it, which throws an error).\n for session in list(self._sessions.values()):\n await session.close()\n\n # ==========================================================================\n # Connection callbacks\n # ==========================================================================\n async def _on_root_request_cb(self, request: Request) -> Response:\n \"\"\"\n Callback passed to the ConnectionManager which is invoked when a HTTP\n request for / occurs.\n \"\"\"\n self._ensure_web_dependencies(self.ui[\"dependencies\"])\n return HTMLResponse(content=self.ui[\"html\"])\n\n async def _on_connect_cb(self, ws: starlette.websockets.WebSocket) -> None:\n \"\"\"\n Callback which is invoked when a new WebSocket connection is established.\n \"\"\"\n await ws.accept()\n conn = StarletteConnection(ws)\n session = self.create_session(conn)\n\n await session.run()\n\n async def _on_session_request_cb(self, request: Request) -> Response:\n \"\"\"\n Callback passed to the ConnectionManager which is invoked when a HTTP\n request for /session/* occurs.\n \"\"\"\n session_id: str = request.path_params[\"session_id\"] # type: ignore\n # subpath: str = request.path_params[\"subpath\"]\n\n if session_id in self._sessions:\n session: ShinySession = self._sessions[session_id]\n with session_context(session):\n return await session.handle_request(request)\n\n return JSONResponse({\"detail\": \"Not Found\"}, status_code=404)\n\n # ==========================================================================\n # Flush\n # ==========================================================================\n def request_flush(self, session: ShinySession) -> None:\n # TODO: Until we have reactive domains, because we can't yet keep track\n # of which sessions need a flush.\n pass\n # self._sessions_needing_flush[session.id] = session\n\n async def flush_pending_sessions(self) -> None:\n await reactcore.flush()\n\n # TODO: Until we have reactive domains, flush all sessions (because we\n # can't yet keep track of which ones need a flush)\n for _, session in self._sessions.items():\n await session.flush()\n # for id, session in self._sessions_needing_flush.items():\n # await session.flush()\n # del self._sessions_needing_flush[id]\n\n # ==========================================================================\n # HTML Dependency stuff\n # ==========================================================================\n def _ensure_web_dependencies(self, deps: List[HTMLDependency]) -> None:\n for dep in deps:\n self.register_web_dependency(dep)\n\n def register_web_dependency(self, dep: HTMLDependency) -> None:\n if (\n dep.name in 
self._registered_dependencies\n and dep.version >= self._registered_dependencies[dep.name].version\n ):\n return\n\n paths = dep.source_path_map(lib_prefix=self.LIB_PREFIX)\n self._dependency_handler.mount(\n \"/\" + paths[\"href\"],\n StaticFiles(directory=paths[\"source\"]),\n name=dep.name + \"-\" + str(dep.version),\n )\n self._registered_dependencies[dep.name] = dep\n\n\ndef _render_page(ui: Union[Tag, TagList], lib_prefix: str) -> RenderedHTML:\n doc = HTMLDocument(TagList(jquery_deps(), shiny_deps(), ui))\n return doc.render(lib_prefix=lib_prefix)\n\nshiny/render.py METASEP\nimport sys\nimport os\nimport io\nimport base64\nimport mimetypes\nimport inspect\nfrom typing import TYPE_CHECKING, Callable, Optional, Awaitable, Union\nimport typing\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\n\nif sys.version_info >= (3, 8):\n from typing import TypedDict\nelse:\n from typing_extensions import TypedDict\n\nfrom htmltools import TagChildArg\n\nif TYPE_CHECKING:\n from .shinysession import ShinySession\n\nfrom . import utils\n\n__all__ = (\n \"render_plot\",\n \"render_image\",\n \"render_ui\",\n)\n\nUserRenderFunction = Callable[[], object]\nUserRenderFunctionAsync = Callable[[], Awaitable[object]]\n\n\nclass ImgData(TypedDict):\n src: str\n width: Union[str, float]\n height: Union[str, float]\n alt: Optional[str]\n\n\nImgRenderFunc = Callable[[], ImgData]\nImgRenderFuncAsync = Callable[[], Awaitable[ImgData]]\n\n\nclass RenderFunction:\n def __init__(self, fn: UserRenderFunction) -> None:\n raise NotImplementedError\n\n def __call__(self) -> object:\n raise NotImplementedError\n\n def set_metadata(self, session: \"ShinySession\", name: str) -> None:\n \"\"\"When RenderFunctions are assigned to Output object slots, this method\n is used to pass along session and name information.\n \"\"\"\n self._session: ShinySession = session\n self._name: str = name\n\n\nclass RenderFunctionAsync(RenderFunction):\n async def __call__(self) -> object:\n raise NotImplementedError\n\n\nclass RenderPlot(RenderFunction):\n _ppi: float = 96\n\n def __init__(self, fn: UserRenderFunction, alt: Optional[str] = None) -> None:\n self._fn: UserRenderFunctionAsync = utils.wrap_async(fn)\n self._alt: Optional[str] = alt\n\n def __call__(self) -> object:\n return utils.run_coro_sync(self.run())\n\n async def run(self) -> object:\n # Reactively read some information about the plot.\n pixelratio: float = typing.cast(\n float, self._session.input[\".clientdata_pixelratio\"]\n )\n width: float = typing.cast(\n float, self._session.input[f\".clientdata_output_{self._name}_width\"]\n )\n height: float = typing.cast(\n float, self._session.input[f\".clientdata_output_{self._name}_height\"]\n )\n\n fig = await self._fn()\n\n if fig is None:\n return None\n\n # Try each type of renderer in turn. The reason we do it this way is to avoid\n # importing modules that aren't already loaded. That could slow things down, or\n # worse, cause an error if the module isn't installed.\n #\n # Each try_render function should return either an ImgResult, None (which\n # indicates that the rendering failed), or the string \"TYPE_MISMATCH\" (which\n # indicate that `fig` object was not the type of object that the renderer knows\n # how to handle). 
In the case of a \"TYPE_MISMATCH\", it will move on to the next\n # renderer.\n result: Union[ImgData, None, Literal[\"TYPE_MISMATCH\"]] = None\n if \"matplotlib\" in sys.modules:\n result = try_render_plot_matplotlib(\n fig, width, height, pixelratio, self._ppi\n )\n if result != \"TYPE_MISMATCH\":\n return result\n\n if \"PIL\" in sys.modules:\n result = try_render_plot_pil(fig, width, height, pixelratio, self._ppi)\n if result != \"TYPE_MISMATCH\":\n return result\n\n raise Exception(\"Unsupported figure type: \" + str(type(fig)))\n\n\nclass RenderPlotAsync(RenderPlot, RenderFunctionAsync):\n def __init__(self, fn: UserRenderFunctionAsync, alt: Optional[str] = None) -> None:\n if not inspect.iscoroutinefunction(fn):\n raise TypeError(\"PlotAsync requires an async function\")\n\n # Init the Plot base class with a placeholder synchronous function so it\n # won't throw an error, then replace it with the async function.\n super().__init__(lambda: None, alt)\n self._fn: UserRenderFunctionAsync = fn\n\n async def __call__(self) -> object:\n return await self.run()\n\n\ndef render_plot(alt: Optional[str] = None):\n def wrapper(fn: Union[UserRenderFunction, UserRenderFunctionAsync]) -> RenderPlot:\n if inspect.iscoroutinefunction(fn):\n fn = typing.cast(UserRenderFunctionAsync, fn)\n return RenderPlotAsync(fn, alt=alt)\n else:\n return RenderPlot(fn, alt=alt)\n\n return wrapper\n\n\n# Try to render a matplotlib object. If `fig` is not a matplotlib object, return\n# \"TYPE_MISMATCH\". If there's an error in rendering, return None. If successful in\n# rendering, return an ImgData object.\ndef try_render_plot_matplotlib(\n fig: object,\n width: float,\n height: float,\n pixelratio: float,\n ppi: float,\n alt: Optional[str] = None,\n) -> Union[ImgData, None, Literal[\"TYPE_MISMATCH\"]]:\n import matplotlib.figure\n import matplotlib.pyplot\n\n if isinstance(fig, matplotlib.figure.Figure):\n try:\n fig.set_dpi(ppi * pixelratio)\n fig.set_size_inches(width / ppi, height / ppi)\n\n with io.BytesIO() as buf:\n fig.savefig(buf, format=\"png\")\n buf.seek(0)\n data = base64.b64encode(buf.read())\n data_str = data.decode(\"utf-8\")\n\n res: ImgData = {\n \"src\": \"data:image/png;base64,\" + data_str,\n \"width\": width,\n \"height\": height,\n \"alt\": alt,\n }\n\n return res\n\n except Exception as e:\n # TODO: just let errors propagate?\n print(\"Error rendering matplotlib object: \" + str(e))\n\n finally:\n matplotlib.pyplot.close(fig)\n\n return None\n\n else:\n return \"TYPE_MISMATCH\"\n\n\ndef try_render_plot_pil(\n fig: object,\n width: float,\n height: float,\n pixelratio: float,\n ppi: float,\n alt: Optional[str] = None,\n) -> Union[ImgData, None, Literal[\"TYPE_MISMATCH\"]]:\n import PIL.Image\n\n if isinstance(fig, PIL.Image.Image):\n try:\n with io.BytesIO() as buf:\n fig.save(buf, format=\"PNG\")\n buf.seek(0)\n data = base64.b64encode(buf.read())\n data_str = data.decode(\"utf-8\")\n\n res: ImgData = {\n \"src\": \"data:image/png;base64,\" + data_str,\n \"width\": width,\n \"height\": height,\n \"alt\": alt,\n }\n\n return res\n\n except Exception as e:\n # TODO: just let errors propagate?\n print(\"Error rendering PIL object: \" + str(e))\n\n return None\n\n else:\n return \"TYPE_MISMATCH\"\n\n\nclass RenderImage(RenderFunction):\n def __init__(self, fn: ImgRenderFunc, delete_file: bool = False) -> None:\n self._fn: ImgRenderFuncAsync = utils.wrap_async(fn)\n self._delete_file: bool = delete_file\n\n def __call__(self) -> object:\n return utils.run_coro_sync(self.run())\n\n async def 
run(self) -> object:\n res: ImgData = await self._fn()\n src: str = res.get(\"src\")\n try:\n with open(src, \"rb\") as f:\n data = base64.b64encode(f.read())\n data_str = data.decode(\"utf-8\")\n # guess_type() returns (type, encoding); we want the content type.\n content_type = mimetypes.guess_type(src)[0]\n res[\"src\"] = f\"data:{content_type};base64,{data_str}\"\n return res\n finally:\n if self._delete_file:\n os.remove(src)\n\n\nclass RenderImageAsync(RenderImage, RenderFunctionAsync):\n def __init__(self, fn: ImgRenderFuncAsync, delete_file: bool = False) -> None:\n if not inspect.iscoroutinefunction(fn):\n raise TypeError(\"RenderImageAsync requires an async function\")\n # Init the RenderImage base class with a placeholder synchronous function so it\n # won't throw an error, then replace it with the async function.\n super().__init__(lambda: None, delete_file)\n self._fn: ImgRenderFuncAsync = fn\n\n async def __call__(self) -> object:\n return await self.run()\n\n\ndef render_image(delete_file: bool = False):\n def wrapper(fn: Union[ImgRenderFunc, ImgRenderFuncAsync]) -> RenderImage:\n if inspect.iscoroutinefunction(fn):\n fn = typing.cast(ImgRenderFuncAsync, fn)\n return RenderImageAsync(fn, delete_file=delete_file)\n else:\n fn = typing.cast(ImgRenderFunc, fn)\n return RenderImage(fn, delete_file=delete_file)\n\n return wrapper\n\n\nUiRenderFunc = Callable[[], TagChildArg]\nUiRenderFuncAsync = Callable[[], Awaitable[TagChildArg]]\n\n\nclass RenderUI(RenderFunction):\n def __init__(self, fn: UiRenderFunc) -> None:\n self._fn: UiRenderFuncAsync = utils.wrap_async(fn)\n\n def __call__(self) -> object:\n return utils.run_coro_sync(self.run())\n\n async def run(self) -> object:\n ui: TagChildArg = await self._fn()\n if ui is None:\n return None\n # TODO: find a better workaround for the circular dependency\n from .shinysession import _process_deps\n\n return _process_deps(ui, self._session)\n\n\nclass RenderUIAsync(RenderUI, RenderFunctionAsync):\n def __init__(self, fn: UiRenderFuncAsync) -> None:\n if not inspect.iscoroutinefunction(fn):\n raise TypeError(\"RenderUIAsync requires an async function\")\n\n super().__init__(lambda: None)\n self._fn: UiRenderFuncAsync = fn\n\n async def __call__(self) -> object:\n return await self.run()\n\n\ndef render_ui():\n def wrapper(fn: Union[UiRenderFunc, UiRenderFuncAsync]) -> RenderUI:\n if inspect.iscoroutinefunction(fn):\n fn = typing.cast(UiRenderFuncAsync, fn)\n return RenderUIAsync(fn)\n else:\n fn = typing.cast(UiRenderFunc, fn)\n return RenderUI(fn)\n\n return wrapper\n\nshiny/reactives.py METASEP\n\"\"\"Reactive components\"\"\"\n\n__all__ = (\n \"ReactiveVal\",\n \"ReactiveValues\",\n \"Reactive\",\n \"ReactiveAsync\",\n \"reactive\",\n \"reactive_async\",\n \"Observer\",\n \"ObserverAsync\",\n \"observe\",\n \"observe_async\",\n \"isolate\",\n \"isolate_async\",\n)\n\nfrom typing import (\n TYPE_CHECKING,\n Optional,\n Callable,\n Awaitable,\n TypeVar,\n Union,\n Generic,\n Any,\n overload,\n)\nimport typing\nimport inspect\n\nfrom .reactcore import Context, Dependents\nfrom . import reactcore\nfrom . 
import utils\nfrom .types import MISSING, MISSING_TYPE\n\nif TYPE_CHECKING:\n from .shinysession import ShinySession\n\nT = TypeVar(\"T\")\n\n# ==============================================================================\n# ReactiveVal and ReactiveValues\n# ==============================================================================\nclass ReactiveVal(Generic[T]):\n def __init__(self, value: T) -> None:\n self._value: T = value\n self._dependents: Dependents = Dependents()\n\n @overload\n def __call__(self) -> T:\n ...\n\n @overload\n def __call__(self, value: T) -> bool:\n ...\n\n def __call__(self, value: Union[MISSING_TYPE, T] = MISSING) -> Union[T, bool]:\n if isinstance(value, MISSING_TYPE):\n return self.get()\n else:\n return self.set(value)\n\n def get(self) -> T:\n self._dependents.register()\n return self._value\n\n def set(self, value: T) -> bool:\n if self._value is value:\n return False\n\n self._value = value\n self._dependents.invalidate()\n return True\n\n\nclass ReactiveValues:\n def __init__(self, **kwargs: object) -> None:\n self._map: dict[str, ReactiveVal[Any]] = {}\n for key, value in kwargs.items():\n self._map[key] = ReactiveVal(value)\n\n def __setitem__(self, key: str, value: object) -> None:\n if key in self._map:\n self._map[key](value)\n else:\n self._map[key] = ReactiveVal(value)\n\n def __getitem__(self, key: str) -> Any:\n # Auto-populate key if accessed but not yet set. Needed to take reactive\n # dependencies on input values that haven't been received from client\n # yet.\n if key not in self._map:\n self._map[key] = ReactiveVal(None)\n\n return self._map[key]()\n\n def __delitem__(self, key: str) -> None:\n del self._map[key]\n\n\n# ==============================================================================\n# Reactive\n# ==============================================================================\nclass Reactive(Generic[T]):\n def __init__(\n self,\n func: Callable[[], T],\n *,\n session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING,\n ) -> None:\n if inspect.iscoroutinefunction(func):\n raise TypeError(\"Reactive requires a non-async function\")\n\n self._func: Callable[[], Awaitable[T]] = utils.wrap_async(func)\n self._is_async: bool = False\n\n self._dependents: Dependents = Dependents()\n self._invalidated: bool = True\n self._running: bool = False\n self._most_recent_ctx_id: int = -1\n self._ctx: Optional[Context] = None\n self._exec_count: int = 0\n\n self._session: Optional[ShinySession]\n # Use `isinstance(x, MISSING_TYPE)`` instead of `x is MISSING` because\n # the type checker doesn't know that MISSING is the only instance of\n # MISSING_TYPE; this saves us from casting later on.\n if isinstance(session, MISSING_TYPE):\n # If no session is provided, autodetect the current session (this\n # could be None if outside of a session).\n session = shinysession.get_current_session()\n self._session = session\n\n # Use lists to hold (optional) value and error, instead of Optional[T],\n # because it makes typing more straightforward. 
For example if\n # .get_value() simply returned self._value, self._value had type\n # Optional[T], then the return type for get_value() would have to be\n # Optional[T].\n self._value: list[T] = []\n self._error: list[Exception] = []\n\n def __call__(self) -> T:\n # Run the Coroutine (synchronously), and then return the value.\n # If the Coroutine yields control, then an error will be raised.\n return utils.run_coro_sync(self.get_value())\n\n async def get_value(self) -> T:\n self._dependents.register()\n\n if self._invalidated or self._running:\n await self.update_value()\n\n if self._error:\n raise self._error[0]\n\n return self._value[0]\n\n async def update_value(self) -> None:\n self._ctx = Context()\n self._most_recent_ctx_id = self._ctx.id\n\n self._ctx.on_invalidate(self._on_invalidate_cb)\n\n self._exec_count += 1\n self._invalidated = False\n\n was_running = self._running\n self._running = True\n\n with shinysession.session_context(self._session):\n try:\n await self._ctx.run(self._run_func, create_task=self._is_async)\n finally:\n self._running = was_running\n\n def _on_invalidate_cb(self) -> None:\n self._invalidated = True\n self._value.clear() # Allow old value to be GC'd\n self._dependents.invalidate()\n self._ctx = None # Allow context to be GC'd\n\n async def _run_func(self) -> None:\n self._error.clear()\n try:\n self._value.append(await self._func())\n except Exception as err:\n self._error.append(err)\n\n\nclass ReactiveAsync(Reactive[T]):\n def __init__(\n self,\n func: Callable[[], Awaitable[T]],\n *,\n session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING,\n ) -> None:\n if not inspect.iscoroutinefunction(func):\n raise TypeError(\"ReactiveAsync requires an async function\")\n\n # Init the Reactive base class with a placeholder synchronous function\n # so it won't throw an error, then replace it with the async function.\n # Need the `cast` to satisfy the type checker.\n super().__init__(lambda: typing.cast(T, None), session=session)\n self._func: Callable[[], Awaitable[T]] = func\n self._is_async = True\n\n async def __call__(self) -> T:\n return await self.get_value()\n\n\ndef reactive(\n *, session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING\n) -> Callable[[Callable[[], T]], Reactive[T]]:\n def create_reactive(fn: Callable[[], T]) -> Reactive[T]:\n return Reactive(fn, session=session)\n\n return create_reactive\n\n\ndef reactive_async(\n *, session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING\n) -> Callable[[Callable[[], Awaitable[T]]], ReactiveAsync[T]]:\n def create_reactive_async(fn: Callable[[], Awaitable[T]]) -> ReactiveAsync[T]:\n return ReactiveAsync(fn, session=session)\n\n return create_reactive_async\n\n\n# ==============================================================================\n# Observer\n# ==============================================================================\nclass Observer:\n def __init__(\n self,\n func: Callable[[], None],\n *,\n session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING,\n priority: int = 0,\n ) -> None:\n if inspect.iscoroutinefunction(func):\n raise TypeError(\"Observer requires a non-async function\")\n\n self._func: Callable[[], Awaitable[None]] = utils.wrap_async(func)\n self._is_async: bool = False\n\n self._priority: int = priority\n\n self._invalidate_callbacks: list[Callable[[], None]] = []\n self._destroyed: bool = False\n self._ctx: Optional[Context] = None\n self._exec_count: int = 0\n\n self._session: Optional[ShinySession]\n # Use `isinstance(x, MISSING_TYPE)`` instead 
of `x is MISSING` because\n # the type checker doesn't know that MISSING is the only instance of\n # MISSING_TYPE; this saves us from casting later on.\n if isinstance(session, MISSING_TYPE):\n # If no session is provided, autodetect the current session (this\n # could be None if outside of a session).\n session = shinysession.get_current_session()\n self._session = session\n\n if self._session is not None:\n self._session.on_ended(self._on_session_ended_cb)\n\n # Defer the first running of this until flushReact is called\n self._create_context().invalidate()\n\n def _create_context(self) -> Context:\n ctx = Context()\n\n # Store the context explicitly in Observer object\n # TODO: More explanation here\n self._ctx = ctx\n\n def on_invalidate_cb() -> None:\n # Context is invalidated, so we don't need to store a reference to it\n # anymore.\n self._ctx = None\n\n for cb in self._invalidate_callbacks:\n cb()\n\n # TODO: Wrap this stuff up in a continue callback, depending on if suspended?\n ctx.add_pending_flush(self._priority)\n\n async def on_flush_cb() -> None:\n if not self._destroyed:\n await self.run()\n\n ctx.on_invalidate(on_invalidate_cb)\n ctx.on_flush(on_flush_cb)\n\n return ctx\n\n async def run(self) -> None:\n ctx = self._create_context()\n self._exec_count += 1\n\n with shinysession.session_context(self._session):\n try:\n await ctx.run(self._func, create_task=self._is_async)\n except Exception as e:\n if self._session:\n await self._session.unhandled_error(e)\n\n def on_invalidate(self, callback: Callable[[], None]) -> None:\n self._invalidate_callbacks.append(callback)\n\n def destroy(self) -> None:\n self._destroyed = True\n\n if self._ctx is not None:\n self._ctx.invalidate()\n\n def _on_session_ended_cb(self) -> None:\n self.destroy()\n\n\nclass ObserverAsync(Observer):\n def __init__(\n self,\n func: Callable[[], Awaitable[None]],\n *,\n session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING,\n priority: int = 0,\n ) -> None:\n if not inspect.iscoroutinefunction(func):\n raise TypeError(\"ObserverAsync requires an async function\")\n\n # Init the Observer base class with a placeholder synchronous function\n # so it won't throw an error, then replace it with the async function.\n super().__init__(lambda: None, session=session, priority=priority)\n self._func: Callable[[], Awaitable[None]] = func\n self._is_async = True\n\n\ndef observe(\n *, priority: int = 0, session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING\n) -> Callable[[Callable[[], None]], Observer]:\n def create_observer(fn: Callable[[], None]) -> Observer:\n return Observer(fn, priority=priority, session=session)\n\n return create_observer\n\n\ndef observe_async(\n *, priority: int = 0, session: Union[MISSING_TYPE, \"ShinySession\", None] = MISSING\n) -> Callable[[Callable[[], Awaitable[None]]], ObserverAsync]:\n def create_observer_async(fn: Callable[[], Awaitable[None]]) -> ObserverAsync:\n return ObserverAsync(fn, priority=priority, session=session)\n\n return create_observer_async\n\n\n# ==============================================================================\n# Miscellaneous functions\n# ==============================================================================\ndef isolate(func: Callable[[], T]) -> T:\n # The `object` in func's type definition also encompasses Awaitable[object],\n # so add a runtime check to make sure that this hasn't been called with an\n # async function.\n if inspect.iscoroutinefunction(func):\n raise TypeError(\"isolate() requires a non-async function\")\n\n 
func_async: Callable[[], Awaitable[T]] = utils.wrap_async(func)\n ctx: Context = reactcore.Context()\n try:\n return utils.run_coro_sync(ctx.run(func_async, create_task=False))\n finally:\n ctx.invalidate()\n\n\nasync def isolate_async(func: Callable[[], Awaitable[T]]) -> T:\n ctx: Context = reactcore.Context()\n try:\n return await ctx.run(func, create_task=True)\n finally:\n ctx.invalidate()\n\n\n# Import here at the bottom seems to fix a circular dependency problem.\nfrom . import shinysession\n\nshiny/reactcore.py METASEP\n\"\"\"Low-level reactive components.\"\"\"\n\n\nfrom typing import Callable, Optional, Awaitable, TypeVar\nfrom contextvars import ContextVar\nfrom asyncio import Task\nimport asyncio\n\nfrom .datastructures import PriorityQueueFIFO\n\nT = TypeVar(\"T\")\n\n\nclass Context:\n \"\"\"A reactive context\"\"\"\n\n def __init__(self) -> None:\n self.id: int = _reactive_environment.next_id()\n self._invalidated: bool = False\n self._invalidate_callbacks: list[Callable[[], None]] = []\n self._flush_callbacks: list[Callable[[], Awaitable[None]]] = []\n\n async def run(self, func: Callable[[], Awaitable[T]], create_task: bool) -> T:\n \"\"\"Run the provided function in this context\"\"\"\n env = _reactive_environment\n return await env.run_with(self, func, create_task)\n\n def invalidate(self) -> None:\n \"\"\"Invalidate this context. It will immediately call the callbacks\n that have been registered with onInvalidate().\"\"\"\n\n if self._invalidated:\n return\n\n self._invalidated = True\n\n for cb in self._invalidate_callbacks:\n cb()\n\n self._invalidate_callbacks.clear()\n\n def on_invalidate(self, func: Callable[[], None]) -> None:\n \"\"\"Register a function to be called when this context is invalidated\"\"\"\n if self._invalidated:\n func()\n else:\n self._invalidate_callbacks.append(func)\n\n def add_pending_flush(self, priority: int) -> None:\n \"\"\"Tell the reactive environment that this context should be flushed the\n next time flushReact() called.\"\"\"\n _reactive_environment.add_pending_flush(self, priority)\n\n def on_flush(self, func: Callable[[], Awaitable[None]]) -> None:\n \"\"\"Register a function to be called when this context is flushed.\"\"\"\n self._flush_callbacks.append(func)\n\n async def execute_flush_callbacks(self) -> None:\n \"\"\"Execute all flush callbacks\"\"\"\n for cb in self._flush_callbacks:\n try:\n await cb()\n finally:\n pass\n\n self._flush_callbacks.clear()\n\n\nclass Dependents:\n def __init__(self) -> None:\n self._dependents: dict[int, Context] = {}\n\n def register(self) -> None:\n ctx: Context = get_current_context()\n if ctx.id not in self._dependents:\n self._dependents[ctx.id] = ctx\n\n def on_invalidate_cb() -> None:\n if ctx.id in self._dependents:\n del self._dependents[ctx.id]\n\n ctx.on_invalidate(on_invalidate_cb)\n\n def invalidate(self) -> None:\n # TODO: Check sort order\n for id in sorted(self._dependents.keys()):\n ctx = self._dependents[id]\n ctx.invalidate()\n\n\nclass ReactiveEnvironment:\n \"\"\"The reactive environment\"\"\"\n\n def __init__(self) -> None:\n self._current_context: ContextVar[Optional[Context]] = ContextVar(\n \"current_context\", default=None\n )\n self._next_id: int = 0\n self._pending_flush_queue: PriorityQueueFIFO[Context] = PriorityQueueFIFO()\n\n def next_id(self) -> int:\n \"\"\"Return the next available id\"\"\"\n id = self._next_id\n self._next_id += 1\n return id\n\n def current_context(self) -> Context:\n \"\"\"Return the current Context object\"\"\"\n ctx = 
self._current_context.get()\n if ctx is None:\n raise RuntimeError(\"No current reactive context\")\n return ctx\n\n async def run_with(\n self, ctx: Context, context_func: Callable[[], Awaitable[T]], create_task: bool\n ) -> T:\n async def wrapper() -> T:\n old = self._current_context.set(ctx)\n try:\n return await context_func()\n finally:\n self._current_context.reset(old)\n\n if not create_task:\n return await wrapper()\n else:\n return await asyncio.create_task(wrapper())\n\n async def flush(self, *, concurrent: bool = True) -> None:\n \"\"\"Flush all pending operations\"\"\"\n # Currently, we default to concurrent flush. In the future, we'll\n # probably remove the option and just do it one way or the other. For a\n # concurrent flush, there are still some issues that need to be\n # resolved.\n if concurrent:\n await self._flush_concurrent()\n else:\n await self._flush_sequential()\n\n async def _flush_concurrent(self) -> None:\n # Flush observers concurrently, using Tasks.\n tasks: list[Task[None]] = []\n\n # Double-nest the check for self._pending_flush because it is possible\n # that running a flush callback (in the gather()) will add another thing\n # to the pending flush list (like if an observer sets a reactive value,\n # which in turn invalidates other reactives/observers).\n while not self._pending_flush_queue.empty():\n while not self._pending_flush_queue.empty():\n # Take the first element\n ctx = self._pending_flush_queue.get()\n\n try:\n task: Task[None] = asyncio.create_task(\n ctx.execute_flush_callbacks()\n )\n tasks.append(task)\n finally:\n pass\n\n await asyncio.gather(*tasks)\n\n async def _flush_sequential(self) -> None:\n # Sequential flush: instead of storing the tasks in a list and\n # calling gather() on them later, just run each observer in\n # sequence.\n while not self._pending_flush_queue.empty():\n ctx = self._pending_flush_queue.get()\n try:\n await ctx.execute_flush_callbacks()\n finally:\n pass\n\n def add_pending_flush(self, ctx: Context, priority: int) -> None:\n self._pending_flush_queue.put(priority, ctx)\n\n\n_reactive_environment = ReactiveEnvironment()\n\n\ndef get_current_context() -> Context:\n return _reactive_environment.current_context()\n\n\nasync def flush(*, concurrent: bool = True) -> None:\n await _reactive_environment.flush(concurrent=concurrent)\n\nshiny/progress.py METASEP\nfrom typing import Optional, Dict, Any\nfrom warnings import warn\nfrom .utils import run_coro_sync, rand_hex\nfrom .shinysession import ShinySession, _require_active_session\n\n\nclass Progress:\n _style = \"notification\"\n\n def __init__(\n self, min: int = 0, max: int = 1, session: Optional[ShinySession] = None\n ):\n self.min = min\n self.max = max\n self.value = None\n self._id = rand_hex(8)\n self._closed = False\n self._session = _require_active_session(session)\n\n msg = {\"id\": self._id, \"style\": self._style}\n self._send_progress(\"open\", msg)\n\n def set(\n self,\n value: float,\n message: Optional[str] = None,\n detail: Optional[str] = None,\n ):\n if self._closed:\n warn(\"Attempting to set progress, but progress already closed.\")\n return None\n\n self.value = value\n if value:\n # Normalize value to number between 0 and 1\n value = min(1, max(0, (value - self.min) / (self.max - self.min)))\n\n msg = {\n \"id\": self._id,\n \"message\": message,\n \"detail\": detail,\n \"value\": value,\n \"style\": self._style,\n }\n\n self._send_progress(\"update\", {k: v for k, v in msg.items() if v is not None})\n\n def inc(\n self,\n amount: float = 
0.1,\n message: Optional[str] = None,\n detail: Optional[str] = None,\n ):\n if self.value is None:\n self.value = self.min\n\n value = min(self.value + amount, self.max)\n self.set(value, message, detail)\n\n def close(self):\n if self._closed:\n warn(\"Attempting to close progress, but progress already closed.\")\n return None\n\n self._send_progress(\"close\", {\"id\": self._id, \"style\": self._style})\n self._closed = True\n\n def _send_progress(self, type: str, message: Dict[str, Any]):\n return run_coro_sync(\n self._session.send_message({\"progress\": {\"type\": type, \"message\": message}})\n )\n\nshiny/page.py METASEP\nimport sys\nfrom typing import Optional, Any, List\nfrom warnings import warn\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nfrom htmltools import tags, Tag, TagList, div, TagChildArg\n\nfrom .html_dependencies import bootstrap_deps\nfrom .navs import navs_bar\n\n\ndef page_navbar(\n *args: TagChildArg, # Create a type for nav()?\n title: Optional[TagChildArg] = None,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n position: Literal[\"static-top\", \"fixed-top\", \"fixed-bottom\"] = \"static-top\",\n header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n bg: Optional[str] = None,\n inverse: Literal[\"auto\", True, False] = \"auto\",\n collapsible: bool = True,\n fluid: bool = True,\n window_title: Optional[str] = None,\n lang: Optional[str] = None\n) -> Tag:\n\n if title is not None and window_title is None:\n # Try to infer window_title from contents of title\n window_title = \" \".join(find_characters(title))\n if not window_title:\n warn(\n \"Unable to infer a `window_title` default from `title`. Consider providing a character string to `window_title`.\"\n )\n\n return tags.html(\n tags.head(tags.title(window_title)),\n tags.body(\n navs_bar(\n *args,\n title=title,\n id=id,\n selected=selected,\n position=position,\n header=header,\n footer=footer,\n bg=bg,\n inverse=inverse,\n collapsible=collapsible,\n fluid=fluid\n )\n ),\n lang=lang,\n )\n\n\ndef page_fluid(\n *args: Any, title: Optional[str] = None, lang: Optional[str] = None, **kwargs: str\n) -> Tag:\n return page_bootstrap(\n div(*args, class_=\"container-fluid\", **kwargs), title=title, lang=lang\n )\n\n\ndef page_fixed(\n *args: Any, title: Optional[str] = None, lang: Optional[str] = None, **kwargs: str\n) -> Tag:\n return page_bootstrap(\n div(*args, class_=\"container\", **kwargs), title=title, lang=lang\n )\n\n\n# TODO: implement theme (just Bootswatch for now?)\ndef page_bootstrap(\n *args: Any, title: Optional[str] = None, lang: Optional[str] = None\n) -> Tag:\n page = TagList(bootstrap_deps(), *args)\n head = tags.title(title) if title else None\n return tags.html(tags.head(head), tags.body(page), lang=lang)\n\n\ndef find_characters(x: Any) -> List[str]:\n if isinstance(x, str):\n return [x]\n elif isinstance(x, list):\n return [y for y in x if isinstance(y, str)]\n else:\n return []\n\nshiny/output.py METASEP\nfrom typing import Optional\nfrom htmltools import tags, Tag, div, css, TagAttrArg, TagFunction\n\n\ndef output_plot(\n id: str, width: str = \"100%\", height: str = \"400px\", inline: bool = False\n) -> Tag:\n res = output_image(id=id, width=width, height=height, inline=inline)\n res.add_class(\"shiny-plot-output\")\n return res\n\n\ndef output_image(\n id: str, width: str = \"100%\", height: str = \"400px\", inline: bool = False\n) -> Tag:\n func = tags.span if inline else div\n 
style = None if inline else css(width=width, height=height)\n return func(id=id, class_=\"shiny-image-output\", style=style)\n\n\ndef output_text(\n id: str, inline: bool = False, container: Optional[TagFunction] = None\n) -> Tag:\n if not container:\n container = tags.span if inline else tags.div\n return container(id=id, class_=\"shiny-text-output\") # type: ignore\n\n\ndef output_text_verbatim(id: str, placeholder: bool = False) -> Tag:\n cls = \"shiny-text-output\" + (\" noplaceholder\" if not placeholder else \"\")\n return tags.pre(id=id, class_=cls)\n\n\ndef output_ui(\n id: str,\n inline: bool = False,\n container: Optional[TagFunction] = None,\n **kwargs: TagAttrArg\n) -> Tag:\n if not container:\n container = tags.span if inline else tags.div\n return container(id=id, class_=\"shiny-html-output\", **kwargs) # type: ignore\n\nshiny/notifications.py METASEP\nimport sys\nfrom typing import Dict, Union, Optional, Any\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nfrom htmltools import TagList, TagChildArg\n\nfrom .utils import run_coro_sync, rand_hex\nfrom .shinysession import ShinySession, _require_active_session, _process_deps\n\n\ndef notification_show(\n ui: TagChildArg,\n action: Optional[TagList] = None,\n duration: Optional[Union[int, float]] = 5,\n close_button: bool = True,\n id: Optional[str] = None,\n type: Literal[\"default\", \"message\", \"warning\", \"error\"] = \"default\",\n session: Optional[ShinySession] = None,\n):\n session = _require_active_session(session)\n\n ui_ = _process_deps(ui, session)\n action_ = _process_deps(action, session)\n\n payload: Dict[str, Any] = {\n \"html\": ui_[\"html\"],\n \"action\": action_[\"html\"],\n \"deps\": ui_[\"deps\"] + action_[\"deps\"],\n \"closeButton\": close_button,\n \"id\": id if id else rand_hex(8),\n \"type\": type,\n }\n\n if duration:\n payload.update({\"duration\": duration * 1000})\n\n return run_coro_sync(\n session.send_message({\"notification\": {\"type\": \"show\", \"message\": payload}})\n )\n\n\ndef notification_remove(id: str, session: Optional[ShinySession] = None):\n session = _require_active_session(session)\n run_coro_sync(\n session.send_message({\"notification\": {\"type\": \"remove\", \"message\": None}})\n )\n return id\n\nshiny/navs.py METASEP\nimport sys\nfrom typing import Optional, Any, Tuple\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nfrom htmltools import jsx_tag_create, JSXTag, TagList, TagChildArg, JSXTagAttrArg\n\nfrom .html_dependencies import nav_deps\n\n\ndef nav(\n title: Any,\n *args: TagChildArg,\n value: Optional[str] = None,\n icon: TagChildArg = None,\n) -> JSXTag:\n if not value:\n value = title\n return nav_tag(\"Nav\", *args, value=value, title=TagList(icon, title))\n\n\ndef nav_menu(\n title: TagChildArg,\n *args: TagChildArg,\n value: Optional[str] = None,\n icon: TagChildArg = None,\n align: Literal[\"left\", \"right\"] = \"left\",\n) -> JSXTag:\n if not value:\n value = str(title)\n return nav_tag(\n \"NavMenu\", *args, value=value, title=TagList(icon, title), align=align\n )\n\n\n# def nav_content(value, *args, icon: TagChildArg = None) -> tag:\n# raise Exception(\"Not yet implemented\")\n\n\ndef nav_item(*args: TagChildArg) -> JSXTag:\n return nav_tag(\"NavItem\", *args)\n\n\ndef nav_spacer() -> JSXTag:\n return nav_tag(\"NavSpacer\")\n\n\ndef navs_tab(\n *args: TagChildArg,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n 
header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n) -> JSXTag:\n return nav_tag(\n \"Navs\",\n *args,\n type=\"tabs\",\n id=id,\n selected=selected,\n header=header,\n footer=footer,\n )\n\n\ndef navs_tab_card(\n *args: TagChildArg,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n) -> JSXTag:\n return nav_tag(\n \"NavsCard\",\n *args,\n type=\"tabs\",\n id=id,\n selected=selected,\n header=header,\n footer=footer,\n )\n\n\ndef navs_pill(\n *args: TagChildArg,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n) -> JSXTag:\n return nav_tag(\n \"Navs\",\n *args,\n type=\"pills\",\n id=id,\n selected=selected,\n header=header,\n footer=footer,\n )\n\n\ndef navs_pill_card(\n *args: TagChildArg,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n placement: Literal[\"above\", \"below\"] = \"above\",\n) -> JSXTag:\n return nav_tag(\n \"NavsCard\",\n *args,\n type=\"pills\",\n id=id,\n selected=selected,\n header=header,\n footer=footer,\n placement=placement,\n )\n\n\ndef navs_pill_list(\n *args: TagChildArg,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n well: bool = True,\n fluid: bool = True,\n widths: Tuple[int, int] = (4, 8),\n) -> JSXTag:\n return nav_tag(\n \"NavsList\",\n *args,\n id=id,\n selected=selected,\n header=header,\n footer=footer,\n well=well,\n widthNav=widths[0],\n widthContent=widths[1],\n )\n\n\n# def navs_hidden(*args, id: Optional[str] = None, selected: Optional[str] = None, header: Any=None, footer: Any=None) -> tag:\n# return nav_tag(\"NavsHidden\", *args, id=id, selected=selected, header=header, footer=footer)\n\n\ndef navs_bar(\n *args: TagChildArg,\n title: Optional[TagChildArg] = None,\n id: Optional[str] = None,\n selected: Optional[str] = None,\n position: Literal[\"static-top\", \"fixed-top\", \"fixed-bottom\"] = \"static-top\",\n header: Optional[TagChildArg] = None,\n footer: Optional[TagChildArg] = None,\n bg: Optional[str] = None,\n inverse: Literal[\"auto\", True, False] = \"auto\",\n collapsible: bool = True,\n fluid: bool = True,\n) -> JSXTag:\n return nav_tag(\n \"NavsBar\",\n *args,\n title=title,\n id=id,\n selected=selected,\n position=position,\n header=header,\n footer=footer,\n bg=bg,\n inverse=inverse,\n collapsible=collapsible,\n fluid=fluid,\n )\n\n\ndef nav_tag(name: str, *args: TagChildArg, **kwargs: JSXTagAttrArg) -> JSXTag:\n tag = jsx_tag_create(\"bslib.\" + name)\n return tag(nav_deps(), *args, **kwargs)\n\nshiny/modal.py METASEP\nimport sys\nfrom typing import Optional, Any\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nfrom htmltools import tags, Tag, div, HTML, TagChildArg, TagAttrArg\n\nfrom .utils import run_coro_sync\nfrom .shinysession import ShinySession, _require_active_session, _process_deps\n\n\ndef modal_button(label: str, icon: TagChildArg = None) -> Tag:\n return tags.button(\n icon,\n label,\n type=\"button\",\n class_=\"btn btn-default\",\n data_dismiss=\"modal\",\n data_bs_dismiss=\"modal\",\n )\n\n\ndef modal(\n *args: TagChildArg,\n title: Optional[str] = None,\n footer: Any = modal_button(\"Dismiss\"),\n size: Literal[\"m\", \"s\", \"l\", \"xl\"] = \"m\",\n 
easy_close: bool = False,\n fade: bool = True,\n **kwargs: TagAttrArg\n) -> Tag:\n\n title_div = None\n if title:\n title_div = div(tags.h4(title, class_=\"modal-title\"), class_=\"modal-header\")\n\n if footer:\n footer = div(footer, class_=\"modal-footer\")\n\n dialog = div(\n div(\n title_div,\n div(*args, class_=\"modal-body\", **kwargs),\n footer,\n class_=\"modal-content\",\n ),\n class_=\"modal-dialog\"\n + ({\"s\": \" modal-sm\", \"l\": \" modal-lg\", \"xl\": \" modal-xl\"}.get(size, \"\")),\n )\n\n # jQuery plugin doesn't work in Bootstrap 5, but vanilla JS doesn't work in Bootstrap 4 :sob:\n js = \"\\n\".join(\n [\n \"if (window.bootstrap && !window.bootstrap.Modal.VERSION.match(/^4\\\\. /)) {\",\n \" var modal=new bootstrap.Modal(document.getElementById('shiny-modal'))\",\n \" modal.show()\",\n \"} else {\",\n \" $('#shiny-modal').modal().focus()\",\n \"}\",\n ]\n )\n\n backdrop = None if easy_close else \"static\"\n keyboard = None if easy_close else \"false\"\n\n return div(\n dialog,\n tags.script(HTML(js)),\n id=\"shiny-modal\",\n class_=\"modal fade\" if fade else \"modal\",\n tabindex=\"-1\",\n data_backdrop=backdrop,\n data_bs_backdrop=backdrop,\n data_keyboard=keyboard,\n data_bs_keyboard=keyboard,\n )\n\n\ndef modal_show(modal: Tag, session: Optional[ShinySession] = None):\n session = _require_active_session(session)\n msg = _process_deps(modal)\n return run_coro_sync(\n session.send_message({\"modal\": {\"type\": \"show\", \"message\": msg}})\n )\n\n\ndef modal_remove(session: Optional[ShinySession] = None):\n session = _require_active_session(session)\n return run_coro_sync(\n session.send_message({\"modal\": {\"type\": \"remove\", \"message\": None}})\n )\n\nshiny/insert-ui.py METASEP\nimport sys\nfrom typing import Optional\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nfrom htmltools import TagList\n\nfrom .shinysession import ShinySession, _require_active_session, _process_deps\n\n\ndef ui_insert(\n selector: str,\n ui: TagList,\n where: Literal[\"beforeEnd\", \"beforeBegin\", \"afterBegin\", \"afterEnd\"] = \"beforeEnd\",\n multiple: bool = False,\n immediate: bool = False,\n session: Optional[ShinySession] = None,\n):\n session = _require_active_session(session)\n\n def callback():\n msg = {\n \"selector\": selector,\n \"multiple\": multiple,\n \"where\": where,\n \"content\": _process_deps(ui, session),\n }\n session.send_message({\"shiny-insert-ui\": msg})\n\n # TODO: Should session have an on_flush() method? 
If not, how to get context object from session?\n callback() if immediate else session.on_flush(callback, once=True)\n\n\ndef ui_remove(\n selector: str,\n multiple: bool = False,\n immediate: bool = False,\n session: Optional[ShinySession] = None,\n):\n session = _require_active_session(session)\n\n def callback():\n session.send_message(\n {\"shiny-remove-ui\": {\"selector\": selector, \"multiple\": multiple}}\n )\n\n callback() if immediate else session.on_flush(callback, once=True)\n\nshiny/input_utils.py METASEP\nfrom htmltools import tags, Tag, TagChildArg\n\n\ndef shiny_input_label(id: str, label: TagChildArg = None) -> Tag:\n cls = \"control-label\" + (\"\" if label else \" shiny-label-null\")\n return tags.label(label, class_=cls, id=id + \"-label\", for_=id)\n\nshiny/input_update.py METASEP\n\nshiny/input_text.py METASEP\nfrom typing import Optional\n\nfrom htmltools import tags, Tag, div, css, TagChildArg\n\nfrom .input_utils import shiny_input_label\n\n\ndef input_text(\n id: str,\n label: TagChildArg,\n value: str = \"\",\n width: Optional[str] = None,\n placeholder: Optional[str] = None,\n) -> Tag:\n return div(\n shiny_input_label(id, label),\n tags.input(\n id=id,\n type=\"text\",\n class_=\"form-control\",\n value=value,\n placeholder=placeholder,\n ),\n class_=\"form-group shiny-input-container\",\n style=css(width=width),\n )\n\n\ndef input_text_area(\n id: str,\n label: TagChildArg,\n value: str = \"\",\n width: Optional[str] = None,\n height: Optional[str] = None,\n cols: Optional[int] = None,\n rows: Optional[int] = None,\n placeholder: Optional[str] = None,\n resize: Optional[str] = None,\n) -> Tag:\n\n if resize and resize not in [\"none\", \"both\", \"horizontal\", \"vertical\"]:\n raise ValueError(\"Invalid resize value: \" + str(resize))\n\n area = tags.textarea(\n id=id,\n class_=\"form-control\",\n style=css(width=None if width else \"100%\", height=height, resize=resize),\n placeholder=placeholder,\n rows=rows,\n cols=cols,\n children=[value],\n )\n\n return div(\n shiny_input_label(id, label),\n area,\n class_=\"form-group shiny-input-container\",\n style=css(width=width),\n )\n\nshiny/input_slider.py METASEP\nimport math\nimport sys\nfrom datetime import date, datetime, timedelta\nfrom typing import Dict, Optional, Union, Tuple, TypeVar\n\nif sys.version_info >= (3, 8):\n from typing import TypedDict\nelse:\n from typing_extensions import TypedDict\nfrom typing_extensions import NotRequired\n\nfrom htmltools import tags, Tag, div, css, TagAttrArg, TagChildArg, HTML\n\nfrom .html_dependencies import ionrangeslider_deps\nfrom .input_utils import shiny_input_label\n\n__all__ = [\"input_slider\"]\n\n# TODO: validate value(s) are within (min,max)?\n\nSliderVal = TypeVar(\"SliderVal\", int, float, datetime, date)\n\n\nclass AnimationOptions(TypedDict):\n interval: NotRequired[int]\n loop: NotRequired[bool]\n play_button: NotRequired[TagChildArg]\n pause_button: NotRequired[TagChildArg]\n\n\ndef input_slider(\n id: str,\n label: TagChildArg,\n min: SliderVal,\n max: SliderVal,\n value: Union[SliderVal, Tuple[SliderVal, SliderVal]],\n step: Optional[Union[int, float, timedelta]] = None,\n ticks: bool = True,\n animate: Union[bool, AnimationOptions] = False,\n width: Optional[str] = None,\n sep: str = \",\",\n pre: Optional[str] = None,\n post: Optional[str] = None,\n time_format: Optional[str] = None,\n timezone: Optional[str] = None,\n drag_range: bool = True,\n) -> Tag:\n\n # Thanks to generic typing, max, value, etc. 
should be of the same type\n data_type = _slider_type(min)\n\n # Make sure min, max, value, and step are all numeric\n # (converts dates/datetimes to milliseconds since epoch...this is the value JS wants)\n min_num = _as_numeric(min)\n max_num = _as_numeric(max)\n val_nums = (\n (_as_numeric(value[0]), _as_numeric(value[1]))\n if isinstance(value, (tuple, list))\n else (_as_numeric(value), _as_numeric(value))\n )\n step_num = _find_step_size(min_num, max_num) if step is None else _as_numeric(step)\n\n n_ticks = None\n if ticks:\n n_steps = (max_num - min_num) / step_num\n # Make sure there are <= 10 steps.\n # n_ticks can be a noninteger, which is good when the range is not an\n # integer multiple of the step size, e.g., min=1, max=10, step=4\n scale_factor = math.ceil(n_steps / 10)\n n_ticks = n_steps / scale_factor\n\n props: Dict[str, TagAttrArg] = {\n \"class_\": \"js-range-slider\",\n \"id\": id,\n \"style\": css(width=width),\n \"data_skin\": \"shiny\",\n # TODO: do we need to worry about scientific notation (i.e., formatNoSci()?)\n \"data_min\": str(min_num),\n \"data_max\": str(max_num),\n \"data_from\": str(val_nums[0]),\n \"data_step\": str(step_num),\n \"data_grid\": ticks,\n \"data_grid_num\": n_ticks,\n \"data_grid_snap\": \"false\",\n \"data_prettify_separator\": sep,\n \"data_prettify_enabled\": sep != \"\",\n \"data_prefix\": pre,\n \"data_postfix\": post,\n \"data_keyboard\": \"true\",\n \"data_data_type\": data_type,\n \"data_time_format\": time_format,\n \"data_timezone\": timezone,\n }\n\n if isinstance(value, (tuple, list)):\n props[\"data_type\"] = \"double\"\n props[\"data_to\"] = str(val_nums[1])\n props[\"data_drag_interval\"] = drag_range\n\n if not time_format and data_type[0:4] == \"date\":\n props[\"data_time_format\"] = \"%F\" if data_type == \"date\" else \"%F %T\"\n\n # ionRangeSlider wants attr = 'true'/'false'\n props = {k: str(v).lower() if isinstance(v, bool) else v for k, v in props.items()}\n\n slider_tag = div(\n shiny_input_label(id, label),\n tags.input(**props),\n *ionrangeslider_deps(),\n class_=\"form-group shiny-input-container\",\n )\n\n if animate is False:\n return slider_tag\n\n if animate is True:\n animate = AnimationOptions()\n\n animate_tag = div(\n tags.a(\n tags.span(animate.get(\"play_button\", _play_icon()), class_=\"play\"),\n tags.span(animate.get(\"pause_button\", _pause_icon()), class_=\"pause\"),\n href=\"#\",\n class_=\"slider-animate-button\",\n data_target_id=id,\n data_interval=animate.get(\"interval\", 500),\n data_loop=animate.get(\"loop\", True),\n ),\n class_=\"slider-animate-container\",\n )\n\n slider_tag.append(animate_tag)\n\n return slider_tag\n\n\ndef _slider_type(x: SliderVal) -> str:\n if isinstance(x, datetime):\n return \"datetime\"\n if isinstance(x, date):\n return \"date\"\n return \"number\"\n\n\ndef _as_numeric(x: Union[int, float, datetime, date, timedelta]) -> Union[int, float]:\n if isinstance(x, timedelta):\n return x.total_seconds() * 1000\n if isinstance(x, datetime):\n return x.timestamp() * 1000\n if isinstance(x, date):\n return datetime(x.year, x.month, x.day).timestamp() * 1000\n return x\n\n\ndef _find_step_size(\n min: Union[int, float], max: Union[int, float]\n) -> Union[int, float]:\n # TODO: this is a naive version of shiny::findStepSize() that might be susceptible to\n # rounding errors? 
https://github.com/rstudio/shiny/pull/1956\n range = max - min\n\n if range < 2 or isinstance(min, float) or isinstance(max, float):\n step = range / 100\n # Round the step to get rid of any floating point arithmetic errors by\n # mimicing what signif(digits = 10, step) does in R (see Description of ?signif)\n # (the basic intuition is that smaller differences need more precision)\n return round(step, 10 - math.ceil(math.log10(step)))\n else:\n return 1\n\n\ndef _play_icon() -> HTML:\n try:\n from fontawesome import icon_svg\n\n return icon_svg(\"play\")\n except ImportError:\n return HTML(\"&#x23ef;\")\n\n\ndef _pause_icon() -> HTML:\n try:\n from fontawesome import icon_svg\n\n return icon_svg(\"pause\")\n except ImportError:\n return HTML(\"&#9616;&#9616;\")\n\nshiny/input_select.py METASEP\nfrom typing import Optional, Dict, Union, List, cast\n\nfrom htmltools import Tag, tags, div, TagChildArg\n\nfrom .html_dependencies import selectize_deps\nfrom .input_utils import shiny_input_label\n\n# This is the canonical format for representing select options.\nSelectInputOptions = Dict[str, Union[str, Dict[str, str]]]\n\n\ndef input_selectize(\n id: str,\n label: TagChildArg,\n choices: Union[List[str], Dict[str, Union[str, List[str], Dict[str, str]]]],\n *,\n selected: Optional[str] = None,\n multiple: bool = False,\n width: Optional[str] = None,\n size: Optional[str] = None,\n) -> Tag:\n\n return input_select(\n id,\n label,\n choices,\n selected=selected,\n multiple=multiple,\n selectize=True,\n width=width,\n size=size,\n )\n # # Make sure accessibility plugin is included by default\n # if not options.get(\"plugins\", None):\n # options[\"plugins\"] = []\n # if \"selectize-plugin-a11y\" not in options[\"plugins\"]:\n # options[\"plugins\"].append(\"selectize-plugin-a11y\")\n # deps = [selectize_deps()]\n # if \"drag_drop\" in options[\"plugins\"]:\n # deps.append(jqui_deps())\n # return jsx_tag_create(\"InputSelectize\")(deps, id=id, options=options, **kwargs)\n\n\ndef input_select(\n id: str,\n label: TagChildArg,\n choices: Union[List[str], Dict[str, Union[str, List[str], Dict[str, str]]]],\n *,\n selected: Optional[str] = None,\n multiple: bool = False,\n selectize: bool = False,\n width: Optional[str] = None,\n size: Optional[str] = None,\n) -> Tag:\n\n choices_ = _normalize_choices(choices)\n if selected is None:\n selected = _find_first_option(choices_)\n\n choices_tags = _render_choices(choices_, selected)\n\n return div(\n shiny_input_label(id, label),\n div(\n tags.select(\n *choices_tags,\n id=id,\n class_=None if selectize else \"form-select\",\n multiple=multiple,\n width=width,\n size=size,\n ),\n (\n [\n tags.script(\"{}\", type=\"application/json\", data_for=id),\n selectize_deps(),\n ]\n if selectize\n else None\n ),\n ),\n class_=\"form-group shiny-input-container\",\n )\n\n\n# x can be structured like any of the following:\n# List:\n# [\"a\", \"b\", \"c\"]\n# Dictionary:\n# {\"Choice A\": \"a\", \"Choice B\": \"b\", \"Choice C\": \"c\"}\n# Dictionary with sub-lists or sub-dictionaries (which are optgroups):\n# {\n# \"Choice A\": \"a\",\n# \"Group B\": {\"Choice B1\": \"b1\", \"Choice B2\": \"b2\"},\n# \"Group C: [\"c1, \"c2\"]\n# }\ndef _normalize_choices(\n x: Union[List[str], Dict[str, Union[str, List[str], Dict[str, str]]]]\n) -> SelectInputOptions:\n if isinstance(x, list):\n return {k: k for k in x}\n\n # If we got here, it's a dict. 
The value of each item.\n result = x.copy()\n for (k, value) in result.items():\n # Convert list[str] to dict[str, str], but leave str, and dict[str, str] alone.\n if isinstance(value, list):\n result[k] = {k: k for k in value}\n\n # The type checker isn't smart enough to realize that none of the values are lists\n # at this point, so tell it to ignore the type.\n return result # type: ignore\n\n\ndef _render_choices(x: SelectInputOptions, selected: Optional[str] = None) -> List[Tag]:\n result: List[Tag] = []\n for (label, value) in x.items():\n if isinstance(value, dict):\n # Type checker needs a little help here -- value is already a narrower type\n # than SelectInputOptions.\n value = cast(SelectInputOptions, value)\n result.append(\n tags.optgroup(*(_render_choices(value, selected)), label=label)\n )\n else:\n result.append(tags.option(label, value=value, selected=(value == selected)))\n\n return result\n\n\n# Returns the first option in a SelectInputOptions object. For most cases, this is\n# straightforward. In the following, the first option is \"a\":\n# { \"Choice A\": \"a\", \"Choice B\": \"b\", \"Choice C\": \"c\" }\n#\n# Sometimes the first option is nested within an optgroup. For example, in the\n# following, the first option is \"b1\":\n# {\n# \"Group A\": {},\n# \"Group B\": {\"Choice B1\": \"b1\", \"Choice B2\": \"b2\"},\n# }\ndef _find_first_option(x: SelectInputOptions) -> Optional[str]:\n for (_label, value) in x.items():\n if isinstance(value, dict):\n value = cast(SelectInputOptions, value)\n result = _find_first_option(value)\n if result is not None:\n return result\n else:\n return value\n\n return None\n\nshiny/input_password.py METASEP\nfrom typing import Optional\n\nfrom htmltools import tags, Tag, div, css, TagChildArg\n\nfrom .input_utils import shiny_input_label\n\n\ndef input_password(\n id: str,\n label: TagChildArg,\n value: str = \"\",\n width: Optional[str] = None,\n placeholder: Optional[str] = None,\n) -> Tag:\n return div(\n shiny_input_label(id, label),\n tags.input(\n id=id,\n type=\"password\",\n value=value,\n class_=\"form-control\",\n placeholder=placeholder,\n ),\n class_=\"form-group shiny-input-container\",\n style=css(width=width),\n )\n\nshiny/input_numeric.py METASEP\nfrom typing import Optional, Union\n\nfrom htmltools import tags, Tag, div, css, TagChildArg\n\nfrom .input_utils import shiny_input_label\n\nvalType = Union[int, float]\n\n\ndef input_numeric(\n id: str,\n label: TagChildArg,\n value: valType,\n min: Optional[valType] = None,\n max: Optional[valType] = None,\n step: Optional[valType] = None,\n width: Optional[str] = None,\n) -> Tag:\n return div(\n shiny_input_label(id, label),\n tags.input(\n id=id,\n type=\"number\",\n class_=\"form-control\",\n value=value,\n min=min,\n max=max,\n step=step,\n ),\n class_=\"form-group shiny-input-container\",\n style=css(width=width),\n )\n\nshiny/input_handlers.py METASEP\nfrom datetime import date, datetime\nfrom typing import TYPE_CHECKING, Callable, Dict, Union, List, Any, TypeVar\n\nif TYPE_CHECKING:\n from .shinysession import ShinySession\n\nInputHandlerType = Callable[[Any, str, \"ShinySession\"], Any]\n\n\nclass _InputHandlers(Dict[str, InputHandlerType]):\n def __init__(self):\n super().__init__()\n\n def add(self, name: str, force: bool = False) -> Callable[[InputHandlerType], None]:\n def _(func: InputHandlerType):\n if name in self and not force:\n raise ValueError(f\"Input handler {name} already registered\")\n self[name] = func\n return None\n\n return _\n\n def remove(self, 
name: str):\n del self[name]\n\n def process_value(\n self, type: str, value: Any, name: str, session: \"ShinySession\"\n ) -> Any:\n handler = self.get(type)\n if handler is None:\n raise ValueError(\"No input handler registered for type: \" + type)\n return handler(value, name, session)\n\n\ninput_handlers = _InputHandlers()\n\n\n_NumberType = TypeVar(\"_NumberType\", int, float, None)\n\n# Doesn't do anything since it seems weird to coerce None into some sort of NA (like we do in R)?\n@input_handlers.add(\"shiny.number\")\ndef _(value: _NumberType, name: str, session: \"ShinySession\") -> _NumberType:\n return value\n\n\n# TODO: implement when we have bookmarking\n@input_handlers.add(\"shiny.password\")\ndef _(value: str, name: str, session: \"ShinySession\") -> str:\n return value\n\n\n@input_handlers.add(\"shiny.date\")\ndef _(\n value: Union[str, List[str]], name: str, session: \"ShinySession\"\n) -> Union[date, List[date]]:\n if isinstance(value, str):\n return datetime.strptime(value, \"%Y-%m-%d\").date()\n return [datetime.strptime(v, \"%Y-%m-%d\").date() for v in value]\n\n\n@input_handlers.add(\"shiny.datetime\")\ndef _(\n value: Union[int, float, List[int], List[float]], name: str, session: \"ShinySession\"\n) -> Union[datetime, List[datetime]]:\n if isinstance(value, (int, float)):\n return datetime.utcfromtimestamp(value)\n return [datetime.utcfromtimestamp(v) for v in value]\n\n\nclass ActionButtonValue(int):\n pass\n\n\n@input_handlers.add(\"shiny.action\")\ndef _(value: int, name: str, session: \"ShinySession\") -> ActionButtonValue:\n return ActionButtonValue(value)\n\n\n# TODO: implement when we have bookmarking\n@input_handlers.add(\"shiny.file\")\ndef _(value: Any, name: str, session: \"ShinySession\") -> Any:\n return value\n\nshiny/input_file.py METASEP\nfrom typing import Optional, List\n\nfrom htmltools import tags, Tag, div, span, css, TagChildArg\n\nfrom .input_utils import shiny_input_label\n\n\ndef input_file(\n id: str,\n label: TagChildArg,\n multiple: bool = False,\n accept: Optional[List[str]] = None,\n width: Optional[str] = None,\n button_label: str = \"Browse...\",\n placeholder: str = \"No file selected\",\n) -> Tag:\n btn_file = span(\n button_label,\n tags.input(\n id=id,\n name=id,\n type=\"file\",\n multiple=\"multiple\" if multiple else None,\n accept=\",\".join(accept) if accept else None,\n # Don't use \"display: none;\" style, which causes keyboard accessibility issue; instead use the following workaround: https://css-tricks.com/places-its-tempting-to-use-display-none-but-dont/\n style=\"position: absolute !important; top: -99999px !important; left: -99999px !important;\",\n ),\n class_=\"btn btn-default btn-file\",\n )\n return div(\n shiny_input_label(id, label),\n div(\n tags.label(btn_file, class_=\"input-group-btn input-group-prepend\"),\n tags.input(\n type=\"text\",\n class_=\"form-control\",\n placeholder=placeholder,\n readonly=\"readonly\",\n ),\n class_=\"input-group\",\n ),\n div(\n div(class_=\"progress-bar\"),\n id=id + \"_progress\",\n class_=\"progress active shiny-file-input-progress\",\n ),\n class_=\"form-group shiny-input-container\",\n style=css(width=width),\n )\n\nshiny/input_date.py METASEP\nimport json\nfrom datetime import date\nfrom typing import Optional\n\nfrom htmltools import tags, Tag, div, span, TagAttrArg, TagChildArg, css\n\nfrom .html_dependencies import datepicker_deps\nfrom .input_utils import shiny_input_label\n\n__all__ = [\"input_date\", \"input_date_range\"]\n\n\ndef input_date(\n id: str,\n label: 
TagChildArg,\n value: Optional[date] = None,\n min: Optional[date] = None,\n max: Optional[date] = None,\n format: str = \"yyyy-mm-dd\",\n startview: str = \"month\",\n weekstart: int = 0,\n language: str = \"en\",\n width: Optional[str] = None,\n autoclose: bool = True,\n datesdisabled: Optional[str] = None,\n daysofweekdisabled: Optional[str] = None,\n) -> Tag:\n # TODO: needed?\n # value = dateYMD(value, \"value\")\n # min = dateYMD(min, \"min\")\n # max = dateYMD(max, \"max\")\n # datesdisabled = dateYMD(datesdisabled, \"datesdisabled\")\n return div(\n shiny_input_label(id, label),\n date_input_tag(\n id=id,\n value=value,\n min=min,\n max=max,\n format=format,\n startview=startview,\n weekstart=weekstart,\n language=language,\n autoclose=autoclose,\n data_date_dates_disabled=json.dumps(datesdisabled),\n data_date_days_of_week_disabled=json.dumps(daysofweekdisabled),\n ),\n id=id,\n class_=\"shiny-date-input form-group shiny-input-container\",\n style=css(width=width),\n )\n\n\ndef input_date_range(\n id: str,\n label: TagChildArg,\n start: Optional[date] = None,\n end: Optional[date] = None,\n min: Optional[date] = None,\n max: Optional[date] = None,\n format: str = \"yyyy-mm-dd\",\n startview: str = \"month\",\n weekstart: int = 0,\n language: str = \"en\",\n separator: str = \" to \",\n width: Optional[str] = None,\n autoclose: bool = True,\n) -> Tag:\n # TODO: needed?\n # start = dateYMD(start, \"start\")\n # end = dateYMD(end, \"end\")\n # min = dateYMD(min, \"min\")\n # max = dateYMD(max, \"max\")\n return div(\n shiny_input_label(id, label),\n div(\n date_input_tag(\n id=id,\n value=start,\n min=min,\n max=max,\n format=format,\n startview=startview,\n weekstart=weekstart,\n language=language,\n autoclose=autoclose,\n ),\n # input-group-prepend and input-group-append are for bootstrap 4 forward compat\n span(\n span(separator, class_=\"input-group-text\"),\n class_=\"input-group-addon input-group-prepend input-group-append\",\n ),\n date_input_tag(\n id=id,\n value=end,\n min=min,\n max=max,\n format=format,\n startview=startview,\n weekstart=weekstart,\n language=language,\n autoclose=autoclose,\n ),\n # input-daterange class is needed for dropdown behavior\n class_=\"input-daterange input-group input-group-sm\",\n ),\n id=id,\n class_=\"shiny-date-range-input form-group shiny-input-container\",\n style=css(width=width),\n )\n\n\ndef date_input_tag(\n id: str,\n value: Optional[date],\n min: Optional[date],\n max: Optional[date],\n format: str,\n startview: str,\n weekstart: int,\n language: str,\n autoclose: bool,\n **kwargs: TagAttrArg,\n):\n return tags.input(\n datepicker_deps(),\n type=\"text\",\n class_=\"form-control\",\n # `aria-labelledby` attribute is required for accessibility to avoid doubled labels (#2951).\n aria_labelledby=id + \"-label\",\n # title attribute is announced for screen readers for date format.\n title=\"Date format: \" + format,\n data_date_language=language,\n data_date_week_start=weekstart,\n data_date_format=format,\n data_date_start_view=startview,\n data_min_date=min,\n data_max_date=max,\n data_initial_date=value,\n data_date_autoclose=\"true\" if autoclose else \"false\",\n **kwargs,\n )\n\nshiny/input_check_radio.py METASEP\nfrom typing import Optional, Union, List, Dict\n\nfrom htmltools import tags, Tag, div, span, css, TagChildArg\n\nfrom .input_utils import shiny_input_label\n\n\ndef input_checkbox(\n id: str, label: TagChildArg, value: bool = False, width: Optional[str] = None\n) -> Tag:\n return div(\n div(\n tags.label(\n 
tags.input(\n id=id, type=\"checkbox\", checked=\"checked\" if value else None\n ),\n span(label),\n ),\n class_=\"checkbox\",\n ),\n class_=\"form-group shiny-input-container\",\n style=css(width=width),\n )\n\n\nchoicesType = Union[Dict[str, str], List[str]]\n\n\ndef input_checkbox_group(\n id: str,\n label: TagChildArg,\n choices: choicesType,\n choice_names: Optional[List[str]] = None,\n selected: Optional[str] = None,\n inline: bool = False,\n width: Optional[str] = None,\n) -> Tag:\n input_label = shiny_input_label(id, label)\n options = generate_options(\n id=id,\n type=\"checkbox\",\n choices=choices,\n choice_names=choice_names,\n selected=selected,\n inline=inline,\n )\n return div(\n input_label,\n options,\n id=id,\n style=css(width=width),\n class_=\"form-group shiny-input-checkboxgroup shiny-input-container\"\n + (\" shiny-input-container-inline\" if inline else \"\"),\n # https://www.w3.org/TR/wai-aria-practices/examples/checkbox/checkbox-1/checkbox-1.html\n role=\"group\",\n aria_labelledby=input_label.attrs.get(\"id\"),\n )\n\n\ndef input_radio_buttons(\n id: str,\n label: TagChildArg,\n choices: choicesType,\n choice_names: Optional[List[str]] = None,\n selected: Optional[str] = None,\n inline: bool = False,\n width: Optional[str] = None,\n) -> Tag:\n input_label = shiny_input_label(id, label)\n options = generate_options(\n id=id,\n type=\"radio\",\n choices=choices,\n choice_names=choice_names,\n selected=selected,\n inline=inline,\n )\n return div(\n input_label,\n options,\n id=id,\n style=css(width=width),\n class_=\"form-group shiny-input-radiogroup shiny-input-container\"\n + (\" shiny-input-container-inline\" if inline else \"\"),\n # https://www.w3.org/TR/2017/WD-wai-aria-practices-1.1-20170628/examples/radio/radio-1/radio-1.html\n role=\"radiogroup\",\n aria_labelledby=input_label.attrs.get(\"id\"),\n )\n\n\ndef generate_options(\n id: str,\n type: str,\n choices: choicesType,\n choice_names: Optional[List[str]],\n selected: Optional[str],\n inline: bool,\n):\n if not choice_names:\n choice_names = list(choices.keys()) if isinstance(choices, dict) else choices\n choices = [v for k, v in choices.items()] if isinstance(choices, dict) else choices\n if type == \"radio\" and not selected:\n selected = choices[0]\n return div(\n *[\n generate_option(id, type, choices[i], choice_names[i], selected, inline)\n for i in range(len(choices))\n ],\n class_=\"shiny-options-group\",\n )\n\n\ndef generate_option(\n id: str,\n type: str,\n choice: str,\n choice_name: str,\n selected: Optional[str],\n inline: bool,\n):\n input = tags.input(\n type=type,\n name=id,\n value=choice,\n checked=\"checked\" if selected == choice else None,\n )\n if inline:\n return tags.label(input, span(choice_name), class_=type + \"-inline\")\n else:\n return div(tags.label(input, span(choice_name)), class_=type)\n\nshiny/input_button.py METASEP\nfrom typing import Optional\n\nfrom htmltools import tags, Tag, TagChildArg, TagAttrArg, css\n\n\ndef input_button(\n id: str,\n label: TagChildArg,\n icon: TagChildArg = None,\n width: Optional[str] = None,\n **kwargs: TagAttrArg,\n) -> Tag:\n return tags.button(\n icon,\n label,\n id=id,\n type=\"button\",\n class_=\"btn btn-default action-button\",\n style=css(width=width),\n **kwargs,\n )\n\n\ndef input_link(\n id: str,\n label: TagChildArg,\n icon: TagChildArg = None,\n **kwargs: TagAttrArg,\n) -> Tag:\n return tags.a(icon, label, id=id, href=\"#\", class_=\"action-button\", **kwargs)\n\nshiny/http_staticfiles.py METASEP\n\"\"\"\nWe can't use 
starlette's StaticFiles when running in wasm mode, because it launches a\nthread. Instead, use our own crappy version. Fortunately, this is all we need.\n\nWhen running in native Python mode, use the starlette StaticFiles impl; it's battle\ntested, whereas ours is not. Under wasm, it's OK if ours has bugs, even security holes:\neverything is running in the browser sandbox including the filesystem, so there's\nnothing we could disclose that an attacker wouldn't already have access to. The same is\nnot true when running in native Python, we want to be as safe as possible.\n\"\"\"\n\nimport sys\n\nif \"pyodide\" not in sys.modules:\n # Running in native mode; use starlette StaticFiles\n\n import starlette.staticfiles\n\n StaticFiles = starlette.staticfiles.StaticFiles # type: ignore\n\nelse:\n # Running in wasm mode; must use our own simple StaticFiles\n\n from typing import Optional, Tuple, MutableMapping, Iterable\n from starlette.types import Scope, Receive, Send\n from starlette.responses import PlainTextResponse\n import os\n import os.path\n import mimetypes\n import pathlib\n import urllib.parse\n\n class StaticFiles:\n dir: pathlib.Path\n root_path: str\n\n def __init__(self, directory: str):\n self.dir = pathlib.Path(os.path.realpath(os.path.normpath(directory)))\n\n async def __call__(self, scope: Scope, receive: Receive, send: Send):\n if scope[\"type\"] != \"http\":\n raise AssertionError(\"StaticFiles can't handle non-http request\")\n path = scope[\"path\"]\n path_segments = path.split(\"/\")\n final_path, trailing_slash = traverse_url_path(self.dir, path_segments)\n if final_path is None:\n return await Error404()(scope, receive, send)\n\n if not final_path.exists():\n return await Error404()(scope, receive, send)\n\n # Sanity check that final path is under self.dir, and if not, 404\n if not final_path.is_relative_to(self.dir):\n return await Error404()(scope, receive, send)\n\n # Serve up the path\n\n if final_path.is_dir():\n if trailing_slash:\n # We could serve up index.html or directory listing if we wanted\n return await Error404()(scope, receive, send)\n else:\n # We could redirect with an added \"/\" if we wanted\n return await Error404()(scope, receive, send)\n else:\n return await FileResponse(final_path)(scope, receive, send)\n\n def traverse_url_path(\n dir: pathlib.Path[str], path_segments: list[str]\n ) -> Tuple[Optional[pathlib.Path[str]], bool]:\n assert len(path_segments) > 0\n\n new_dir = dir\n path_segment = urllib.parse.unquote(path_segments.pop(0))\n # Gratuitous whitespace is not allowed\n if path_segment != path_segment.strip():\n return None, False\n\n # Check for illegal paths\n if \"/\" in path_segment:\n return None, False\n elif path_segment == \"..\" or path_segment == \".\":\n return None, False\n\n if path_segment != \"\":\n new_dir = dir / path_segment\n\n if len(path_segments) == 0:\n return new_dir, path_segment == \"\"\n else:\n return traverse_url_path(new_dir, path_segments)\n\n class Error404(PlainTextResponse):\n def __init__(self):\n super().__init__(\"404\", status_code=404) # type: ignore\n\n class FileResponse:\n file: os.PathLike[str]\n headers: Optional[MutableMapping[str, str]]\n media_type: str\n\n def __init__(\n self,\n file: os.PathLike[str],\n headers: Optional[MutableMapping[str, str]] = None,\n media_type: Optional[str] = None,\n ) -> None:\n self.headers = headers\n self.file = file\n\n if media_type is None:\n media_type, _ = mimetypes.guess_type(file, strict=False)\n if media_type is None:\n media_type = 
\"application/octet-stream\"\n self.media_type = media_type\n\n async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:\n await send(\n {\n \"type\": \"http.response.start\",\n \"status\": 200,\n \"headers\": convert_headers(self.headers, self.media_type),\n }\n )\n with open(self.file, \"rb\") as f:\n data = f.read()\n await send(\n {\"type\": \"http.response.body\", \"body\": data, \"more_body\": False}\n )\n\n def convert_headers(\n headers: Optional[MutableMapping[str, str]], media_type: Optional[str] = None\n ) -> Iterable[Tuple[bytes, bytes]]:\n if headers is None:\n headers = {}\n\n header_list = [\n (k.encode(\"latin-1\"), v.encode(\"latin-1\")) for k, v in headers.items()\n ]\n if media_type is not None:\n header_list += [\n (\n b\"Content-Type\",\n media_type.encode(\"latin-1\"),\n )\n ]\n return header_list\n\nshiny/html_dependencies.py METASEP\nfrom htmltools import HTMLDependency, HTML\nfrom typing import List, Union\n\n\ndef shiny_deps() -> HTMLDependency:\n return HTMLDependency(\n name=\"shiny\",\n version=\"0.0.1\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/\"},\n script={\"src\": \"shiny.js\"},\n stylesheet={\"href\": \"shiny.min.css\"},\n )\n\n\ndef bootstrap_deps(bs3compat: bool = True) -> List[HTMLDependency]:\n dep = HTMLDependency(\n name=\"bootstrap\",\n version=\"5.0.1\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/bootstrap/\"},\n script={\"src\": \"bootstrap.bundle.min.js\"},\n stylesheet={\"href\": \"bootstrap.min.css\"},\n )\n deps = [jquery_deps(), dep]\n if bs3compat:\n deps.append(bs3compat_deps())\n return deps\n\n\n# TODO: if we want to support glyphicons we'll need to bundle font files, too\ndef bs3compat_deps() -> HTMLDependency:\n return HTMLDependency(\n name=\"bs3-compat\",\n version=\"1.0\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/bs3compat/\"},\n script=[{\"src\": \"transition.js\"}, {\"src\": \"tabs.js\"}, {\"src\": \"bs3compat.js\"}],\n )\n\n\ndef jquery_deps() -> HTMLDependency:\n return HTMLDependency(\n name=\"jquery\",\n version=\"3.6.0\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/jquery/\"},\n script={\"src\": \"jquery-3.6.0.min.js\"},\n )\n\n\ndef nav_deps(\n include_bootstrap: bool = True,\n) -> Union[HTMLDependency, List[HTMLDependency]]:\n dep = HTMLDependency(\n name=\"bslib-navs\",\n version=\"1.0\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/bslib/dist/\"},\n script={\"src\": \"navs.min.js\"},\n )\n return [dep, *bootstrap_deps()] if include_bootstrap else dep\n\n\ndef ionrangeslider_deps() -> List[HTMLDependency]:\n return [\n HTMLDependency(\n name=\"ionrangeslider\",\n version=\"2.3.1\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/ionrangeslider/\"},\n script={\"src\": \"js/ion.rangeSlider.min.js\"},\n stylesheet={\"href\": \"css/ion.rangeSlider.css\"},\n ),\n HTMLDependency(\n name=\"strftime\",\n version=\"0.9.2\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/strftime/\"},\n script={\"src\": \"strftime-min.js\"},\n ),\n ]\n\n\ndef datepicker_deps() -> HTMLDependency:\n return HTMLDependency(\n name=\"bootstrap-datepicker\",\n version=\"1.9.0\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/datepicker/\"},\n # TODO: pre-compile the Bootstrap 5 version?\n stylesheet={\"href\": \"css/bootstrap-datepicker3.min.css\"},\n script={\"src\": \"js/bootstrap-datepicker.min.js\"},\n # Need to enable noConflict mode. 
See #1346.\n head=HTML(\n \"\"\n ),\n )\n\n\ndef selectize_deps() -> HTMLDependency:\n return HTMLDependency(\n name=\"selectize\",\n version=\"0.12.6\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/selectize/\"},\n script=[\n {\"src\": \"js/selectize.min.js\"},\n {\"src\": \"accessibility/js/selectize-plugin-a11y.min.js\"},\n ],\n # TODO: pre-compile the Bootstrap 5 version?\n stylesheet={\"href\": \"css/selectize.bootstrap3.css\"},\n )\n\n\ndef jqui_deps() -> HTMLDependency:\n return HTMLDependency(\n name=\"jquery-ui\",\n version=\"1.12.1\",\n source={\"package\": \"shiny\", \"subdir\": \"www/shared/jqueryui/\"},\n script={\"src\": \"jquery-ui.min.js\"},\n stylesheet={\"href\": \"jquery-ui.min.css\"},\n )\n\nshiny/fileupload.py METASEP\nimport sys\nfrom typing import Optional, BinaryIO, List\nimport typing\nimport tempfile\nimport os\nimport copy\nimport shutil\nimport pathlib\n\nif sys.version_info >= (3, 8):\n from typing import TypedDict\nelse:\n from typing_extensions import TypedDict\n\n\nfrom . import utils\n\n# File uploads happen through a series of requests. This requires a browser\n# which supports the HTML5 File API.\n#\n# 1. Client tells server that one or more files are about to be uploaded, with\n# an \"uploadInit\" message; the server responds with a \"jobId\" and \"uploadUrl\"\n# that the client should use to upload the files. From the server's\n# perspective, the messages look like this:\n# RECV {\"method\":\"uploadInit\",\"args\":[[{\"name\":\"mtcars.csv\",\"size\":1303,\"type\":\"text/csv\"}]],\"tag\":2}\n# SEND {\"response\":{\"tag\":2,\"value\":{\"jobId\":\"1651ddebfb643a26e6f18aa1\",\"uploadUrl\":\"session/3cdbe3c4d1318225fee8f2e3417a1c99/upload/1651ddebfb643a26e6f18aa1?w=\"}}}\n#\n# 2. For each file (sequentially):\n# b. Client makes a POST request with the file data.\n# c. Server sends a 200 response to the client.\n#\n# 3. Repeat 2 until all files have been uploaded.\n#\n# 4. Client tells server that all files have been uploaded, along with the\n# input ID that this data should be associated with. The server responds\n# with the tag ID and a null message. 
The messages look like this:\n# RECV {\"method\":\"uploadEnd\",\"args\":[\"1651ddebfb643a26e6f18aa1\",\"file1\"],\"tag\":3}\n# SEND {\"response\":{\"tag\":3,\"value\":null}}\n\n\n# Information about a single file, with a structure like:\n# {'name': 'mtcars.csv', 'size': 1303, 'type': 'text/csv', 'datapath: '/...../mtcars.csv'}\n# The incoming data doesn't include 'datapath'; that field is added by the\n# FileUploadOperation class.\nclass FileInfo(TypedDict):\n name: str\n size: int\n type: str\n datapath: str\n\n\nclass FileUploadOperation:\n def __init__(\n self, parent: \"FileUploadManager\", id: str, dir: str, file_infos: List[FileInfo]\n ) -> None:\n self._parent: FileUploadManager = parent\n self._id: str = id\n self._dir: str = dir\n # Copy file_infos and add a \"datapath\" entry for each file.\n self._file_infos: list[FileInfo] = [\n typing.cast(FileInfo, {**fi, \"datapath\": \"\"})\n for fi in copy.deepcopy(file_infos)\n ]\n self._n_uploaded: int = 0\n self._current_file_obj: Optional[BinaryIO] = None\n\n # Start uploading one of the files.\n def file_begin(self) -> None:\n file_info: FileInfo = self._file_infos[self._n_uploaded]\n file_ext = pathlib.Path(file_info[\"name\"]).suffix\n file_info[\"datapath\"] = os.path.join(\n self._dir, str(self._n_uploaded) + file_ext\n )\n self._current_file_obj = open(file_info[\"datapath\"], \"ab\")\n\n # Finish uploading one of the files.\n def file_end(self) -> None:\n if self._current_file_obj is not None:\n self._current_file_obj.close()\n self._current_file_obj = None\n self._n_uploaded += 1\n\n # Write a chunk of data for the currently-open file.\n def write_chunk(self, chunk: bytes) -> None:\n if self._current_file_obj is None:\n raise RuntimeError(f\"FileUploadOperation for {self._id} is not open.\")\n self._current_file_obj.write(chunk)\n\n # End the entire operation, which can consist of multiple files.\n def finish(self) -> List[FileInfo]:\n if self._n_uploaded != len(self._file_infos):\n raise RuntimeError(\n f\"Not all files for FileUploadOperation {self._id} were uploaded.\"\n )\n self._parent.on_job_finished(self._id)\n return self._file_infos\n\n # Context handlers for `with`\n def __enter__(self) -> None:\n self.file_begin()\n\n def __exit__(self, type, value, trace) -> None: # type: ignore\n self.file_end()\n\n\nclass FileUploadManager:\n def __init__(self) -> None:\n # TODO: Remove basedir when app exits.\n self._basedir: str = tempfile.mkdtemp(prefix=\"fileupload-\")\n self._operations: dict[str, FileUploadOperation] = {}\n\n def create_upload_operation(self, file_infos: List[FileInfo]) -> str:\n job_id = utils.rand_hex(12)\n dir = tempfile.mkdtemp(dir=self._basedir)\n self._operations[job_id] = FileUploadOperation(self, job_id, dir, file_infos)\n return job_id\n\n def get_upload_operation(self, id: str) -> Optional[FileUploadOperation]:\n if id in self._operations:\n return self._operations[id]\n else:\n return None\n\n def on_job_finished(self, job_id: str) -> None:\n del self._operations[job_id]\n\n # Remove the directories containing file uploads; this is to be called when\n # a session ends.\n def rm_upload_dir(self) -> None:\n shutil.rmtree(self._basedir)\n\nshiny/datastructures.py METASEP\nfrom typing import TypeVar, Generic\nfrom queue import PriorityQueue\n\n\nT = TypeVar(\"T\")\n\n\nclass PriorityQueueFIFO(Generic[T]):\n \"\"\"\n Similar to queue.PriorityQueue, except that if two elements have the same\n priority, they are returned in the order they were inserted. 
Also, the item\n is kept separate from the priority value (with PriorityQueue, the priority\n is part of the item).\n \"\"\"\n\n def __init__(self) -> None:\n self._pq: PriorityQueue[tuple[int, int, T]] = PriorityQueue()\n self._counter: int = 0\n\n def put(self, priority: int, item: T) -> None:\n \"\"\"\n Add an item to the queue.\n\n Parameters:\n priority (int): The priority of the item. Higher priority items will\n come out of the queue before lower priority items.\n item (T): The item to put in the queue.\n \"\"\"\n self._counter += 1\n self._pq.put((-priority, self._counter, item))\n\n def get(self) -> T:\n return self._pq.get()[2]\n\n def empty(self) -> bool:\n return self._pq.empty()\n\nshiny/connmanager.py METASEP\nfrom abc import ABC, abstractmethod\nfrom typing import Optional\nimport starlette.websockets\nfrom starlette.websockets import WebSocketState\n\n\nclass Connection(ABC):\n \"\"\"Abstract class to serve a session and send/receive messages to the\n client.\"\"\"\n\n @abstractmethod\n async def send(self, message: str) -> None:\n ...\n\n @abstractmethod\n async def receive(self) -> str:\n ...\n\n @abstractmethod\n async def close(self, code: int, reason: Optional[str]) -> None:\n ...\n\n\nclass StarletteConnection(Connection):\n def __init__(self, conn: starlette.websockets.WebSocket):\n self.conn: starlette.websockets.WebSocket = conn\n\n async def accept(self, subprotocol: Optional[str] = None):\n await self.conn.accept(subprotocol) # type: ignore\n\n async def send(self, message: str) -> None:\n if self._is_closed():\n return\n\n await self.conn.send_text(message)\n\n async def receive(self) -> str:\n if self._is_closed():\n raise ConnectionClosed()\n\n try:\n return await self.conn.receive_text()\n except starlette.websockets.WebSocketDisconnect:\n raise ConnectionClosed()\n\n async def close(self, code: int, reason: Optional[str]) -> None:\n if self._is_closed():\n return\n\n await self.conn.close(code)\n\n def _is_closed(self) -> bool:\n return (\n self.conn.application_state == WebSocketState.DISCONNECTED # type: ignore\n or self.conn.client_state == WebSocketState.DISCONNECTED # type: ignore\n )\n\n\nclass ConnectionClosed(Exception):\n \"\"\"Raised when a Connection is closed from the other side.\"\"\"\n\n pass\n\nshiny/bootstrap.py METASEP\nimport sys\nfrom typing import Callable, Optional\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nfrom htmltools import (\n TagChildArg,\n TagAttrArg,\n TagList,\n Tag,\n div,\n tags,\n h2,\n css,\n span,\n HTML,\n)\n\nfrom .html_dependencies import jqui_deps\n\n\ndef row(*args: TagChildArg, **kwargs: TagAttrArg) -> Tag:\n return div(*args, class_=\"row\", **kwargs)\n\n\ndef column(\n width: int, *args: TagChildArg, offset: int = 0, **kwargs: TagAttrArg\n) -> Tag:\n if width < 1 or width > 12:\n raise ValueError(\"Column width must be between 1 and 12\")\n cls = \"col-sm-\" + str(width)\n if offset > 0:\n # offset-md-x is for bootstrap 4 forward compat\n # (every size tier has been bumped up one level)\n # https://github.com/twbs/bootstrap/blob/74b8fe7/docs/4.3/migration/index.html#L659\n off = str(offset)\n cls += f\" offset-md-{off} col-sm-offset-{off}\"\n return div(*args, class_=cls, **kwargs)\n\n\n# TODO: also accept a generic list (and wrap in panel in that case)\ndef layout_sidebar(\n sidebar: TagChildArg, main: TagChildArg, position: Literal[\"left\", \"right\"] = \"left\"\n) -> Tag:\n return row(sidebar, main) if position == \"left\" else row(main, 
sidebar)\n\n\ndef panel_well(*args: TagChildArg, **kwargs: TagAttrArg) -> Tag:\n return div(*args, class_=\"well\", **kwargs)\n\n\ndef panel_sidebar(*args: TagChildArg, width: int = 4, **kwargs: TagAttrArg) -> Tag:\n return div(\n # A11y semantic landmark for sidebar\n tags.form(*args, role=\"complementary\", class_=\"well\", **kwargs),\n class_=\"col-sm-\" + str(width),\n )\n\n\ndef panel_main(*args: TagChildArg, width: int = 8, **kwargs: TagAttrArg):\n return div(\n # A11y semantic landmark for main region\n *args,\n role=\"main\",\n class_=\"col-sm-\" + str(width),\n **kwargs,\n )\n\n\n# TODO: replace `flowLayout()`/`splitLayout()` with a flexbox wrapper?\n# def panel_input(*args: TagChild, **kwargs: TagAttr):\n# return div(flowLayout(...), class_=\"shiny-input-panel\")\n\n\ndef panel_conditional(\n condition: str,\n *args: TagChildArg,\n # TODO: do we have an answer for shiny::NS() yet?\n ns: Callable[[str], str] = lambda x: x,\n **kwargs: TagAttrArg,\n):\n return div(*args, data_display_if=condition, data_ns_prefix=ns(\"\"), **kwargs)\n\n\ndef panel_title(title: str, windowTitle: Optional[str] = None) -> TagList:\n if windowTitle is None:\n windowTitle = title\n return TagList(\n tags.head(tags.title(windowTitle)),\n h2(title),\n )\n\n\ndef panel_fixed(*args: TagChildArg, **kwargs: TagAttrArg) -> TagList:\n return panel_absolute(*args, fixed=True, **kwargs)\n\n\ndef panel_absolute(\n *args: TagChildArg,\n top: Optional[str] = None,\n left: Optional[str] = None,\n right: Optional[str] = None,\n bottom: Optional[str] = None,\n width: Optional[str] = None,\n height: Optional[str] = None,\n draggable: bool = False,\n fixed: bool = False,\n cursor: Literal[\"auto\", \"move\", \"default\", \"inherit\"] = \"auto\",\n **kwargs: TagAttrArg,\n) -> TagList:\n style = css(\n top=top,\n left=left,\n right=right,\n bottom=bottom,\n width=width,\n height=height,\n position=\"fixed\" if fixed else \"absolute\",\n cursor=\"move\" if draggable else \"inherit\" if cursor == \"auto\" else cursor,\n )\n divTag = div(*args, style=style, **kwargs)\n if not draggable:\n return TagList(divTag)\n divTag.add_class(\"draggable\")\n deps = jqui_deps()\n deps.stylesheet = []\n return TagList(deps, divTag, tags.script(HTML('$(\".draggable\").draggable();')))\n\n\ndef help_text(*args: TagChildArg, **kwargs: TagAttrArg) -> Tag:\n return span(*args, class_=\"help-block\", **kwargs)\n\nshiny/__init__.py METASEP\n\"\"\"Top-level package for Shiny.\"\"\"\n\n__author__ = \"\"\"Winston Chang\"\"\"\n__email__ = \"winston@rstudio.com\"\n__version__ = \"0.0.0.9000\"\n\n# All objects imported into this scope will be available as shiny.foo\nfrom .bootstrap import *\nfrom .input_button import *\nfrom .input_check_radio import *\nfrom .input_date import *\nfrom .input_file import *\nfrom .input_numeric import *\nfrom .input_password import *\nfrom .input_select import *\nfrom .input_slider import *\nfrom .input_text import *\nfrom .modal import *\nfrom .navs import *\nfrom .notifications import *\nfrom .output import *\nfrom .page import *\nfrom .progress import *\nfrom .render import *\nfrom .reactives import *\nfrom .shinyapp import *\nfrom .shinysession import *\nfrom .shinymodule import *\n\nexamples/simple/app.py METASEP\n# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This 
makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n layout_sidebar(\n panel_sidebar(\n input_slider(\"n\", \"N\", 0, 100, 20),\n ),\n panel_main(\n output_text_verbatim(\"txt\", placeholder=True),\n output_plot(\"plot\"),\n ),\n ),\n)\n\n# from htmltools.core import HTMLDocument\n# from shiny import html_dependencies\n# HTMLDocument(TagList(ui, html_dependencies.shiny_deps())).save_html(\"temp/app.html\")\n\n\n# A ReactiveVal which is exists outside of the session.\nshared_val = ReactiveVal(None)\n\n\ndef server(session: ShinySession):\n @reactive()\n def r():\n if session.input[\"n\"] is None:\n return\n return session.input[\"n\"] * 2\n\n @session.output(\"txt\")\n async def _():\n val = r()\n return f\"n*2 is {val}, session id is {get_current_session().id}\"\n\n\napp = ShinyApp(ui, server)\n\nif __name__ == \"__main__\":\n app.run()\n # Alternately, to listen on a TCP port:\n # app.run(conn_type = \"tcp\")\n\nexamples/myapp/app.py METASEP\n# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\nfrom shiny.fileupload import FileInfo\n\n# For plot rendering\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nui = page_fluid(\n layout_sidebar(\n panel_sidebar(\n input_slider(\"n\", \"N\", 0, 100, 20),\n input_file(\"file1\", \"Choose file\", multiple=True),\n ),\n panel_main(\n output_text_verbatim(\"txt\"),\n output_text_verbatim(\"shared_txt\"),\n output_plot(\"plot\"),\n output_text_verbatim(\"file_content\"),\n ),\n ),\n)\n\n# A ReactiveVal which is shared across all sessions.\nshared_val = ReactiveVal(None)\n\n\ndef server(session: ShinySession):\n @reactive()\n def r():\n if session.input[\"n\"] is None:\n return\n return session.input[\"n\"] * 2\n\n @session.output(\"txt\")\n async def _():\n val = r()\n return f\"n*2 is {val}, session id is {get_current_session().id}\"\n\n # This observer watches n, and changes shared_val, which is shared across\n # all running sessions.\n @observe()\n def _():\n if session.input[\"n\"] is None:\n return\n shared_val(session.input[\"n\"] * 10)\n\n # Print the value of shared_val(). 
Changing it in one session should cause\n # this to run in all sessions.\n @session.output(\"shared_txt\")\n def _():\n return f\"shared_val() is {shared_val()}\"\n\n @session.output(\"plot\")\n @render_plot(alt=\"A histogram\")\n def _():\n np.random.seed(19680801)\n x = 100 + 15 * np.random.randn(437)\n\n fig, ax = plt.subplots()\n ax.hist(x, session.input[\"n\"], density=True)\n return fig\n\n @session.output(\"file_content\")\n def _():\n file_infos: list[FileInfo] = session.input[\"file1\"]\n if not file_infos:\n return\n\n out_str = \"\"\n for file_info in file_infos:\n out_str += \"====== \" + file_info[\"name\"] + \" ======\\n\"\n with open(file_info[\"datapath\"], \"r\") as f:\n out_str += f.read()\n\n return out_str\n\n\napp = ShinyApp(ui, server)\n\nif __name__ == \"__main__\":\n app.run()\n # Alternately, to listen on a TCP port:\n # app.run(conn_type = \"tcp\")\n\nexamples/moduleapp/app.py METASEP\n# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\n# =============================================================================\n# Counter module\n# =============================================================================\ndef counter_module_ui(\n ns: Callable[[str], str], label: str = \"Increment counter\"\n) -> TagChildArg:\n return TagList(\n input_button(id=ns(\"button\"), label=label),\n output_text_verbatim(id=ns(\"out\")),\n )\n\n\ndef counter_module_server(session: ShinySessionProxy):\n count: ReactiveVal[int] = ReactiveVal(0)\n\n @observe()\n def _():\n session.input[\"button\"]\n isolate(lambda: count(count() + 1))\n\n @session.output(\"out\")\n def _() -> str:\n return f\"Click count is {count()}\"\n\n\ncounter_module = ShinyModule(counter_module_ui, counter_module_server)\n\n\n# =============================================================================\n# App that uses module\n# =============================================================================\nui = page_fluid(\n counter_module.ui(\"counter1\", \"Counter 1\"),\n counter_module.ui(\"counter2\", \"Counter 2\"),\n)\n\n\ndef server(session: ShinySession):\n counter_module.server(\"counter1\")\n counter_module.server(\"counter2\")\n\n\napp = ShinyApp(ui, server)\n\n\nif __name__ == \"__main__\":\n app.run()\n\nexamples/inputs/app.py METASEP\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nimport os\nimport sys\n\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\n\nfrom shiny import *\nfrom htmltools import tags, HTML\nfrom fontawesome import icon_svg\n\nui = page_fluid(\n panel_title(\"Hello prism ui\"),\n layout_sidebar(\n panel_sidebar(\n input_slider(\n \"n\", \"input_slider()\", min=10, max=100, value=50, step=5, animate=True\n ),\n input_date(\"date\", \"input_date()\"),\n input_date_range(\"date_rng\", \"input_date_range()\"),\n input_text(\"txt\", \"input_text()\", placeholder=\"Input some text\"),\n input_text_area(\n \"txt_area\", \"input_text_area()\", placeholder=\"Input some text\"\n ),\n input_numeric(\"num\", 
\"input_numeric()\", 20),\n input_password(\"password\", \"input_password()\"),\n input_checkbox(\"checkbox\", \"input_checkbox()\"),\n input_checkbox_group(\n \"checkbox_group\",\n \"input_checkbox_group()\",\n {\"Choice 1\": \"a\", \"Choice 2\": \"b\"},\n selected=\"b\",\n inline=True,\n ),\n input_radio_buttons(\n \"radio\", \"input_radio()\", {\"Choice 1\": \"a\", \"Choice 2\": \"b\"}\n ),\n input_select(\n \"select\",\n \"input_select()\",\n {\n \"Choice A\": \"a\",\n \"Group B\": {\"Choice B1\": \"b1\", \"Choice B2\": \"b2\"},\n \"Group C\": [\"c1\", \"c2\"],\n },\n ),\n input_button(\"button\", \"input_button()\", icon=icon_svg(\"check\")),\n input_file(\"file\", \"File upload\"),\n ),\n panel_main(\n output_plot(\"plot\"),\n navs_tab_card(\n # TODO: output_plot() within a tab not working?\n nav(\"Inputs\", output_ui(\"inputs\"), icon=icon_svg(\"code\")),\n nav(\n \"Image\", output_image(\"image\", inline=True), icon=icon_svg(\"image\")\n ),\n nav(\n \"Misc\",\n input_link(\n \"link\", \"Show notification/progress\", icon=icon_svg(\"info\")\n ),\n tags.br(),\n input_button(\"btn\", \"Show modal\", icon=icon_svg(\"info-circle\")),\n panel_fixed(\n panel_well(\n \"A fixed, draggable, panel\",\n input_checkbox(\"checkbox2\", \"Check me!\"),\n panel_conditional(\n \"input.checkbox2 == true\", \"Thanks for checking!\"\n ),\n ),\n draggable=True,\n width=\"fit-content\",\n height=\"50px\",\n top=\"50px\",\n right=\"50px\",\n ),\n icon=icon_svg(\"code\"),\n ),\n ),\n ),\n ),\n)\n\n\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\ndef server(s: ShinySession):\n @s.output(\"inputs\")\n @render_ui()\n def _() -> Tag:\n vals = [\n f\"input_date() {s.input['date']}\",\n f\"input_date_range(): {s.input['date_rng']}\",\n f\"input_text(): {s.input['txt']}\",\n f\"input_text_area(): {s.input['txt_area']}\",\n f\"input_numeric(): {s.input['num']}\",\n f\"input_password(): {s.input['password']}\",\n f\"input_checkbox(): {s.input['checkbox']}\",\n f\"input_checkbox_group(): {s.input['checkbox_group']}\",\n f\"input_radio(): {s.input['radio']}\",\n f\"input_select(): {s.input['select']}\",\n f\"input_button(): {s.input['button']}\",\n ]\n return tags.pre(HTML(\"\\n\".join(vals)))\n\n np.random.seed(19680801)\n x_rand = 100 + 15 * np.random.randn(437)\n\n @s.output(\"plot\")\n @render_plot(alt=\"A histogram\")\n def _():\n fig, ax = plt.subplots()\n ax.hist(x_rand, int(s.input[\"n\"]), density=True)\n return fig\n\n @s.output(\"image\")\n @render_image()\n def _():\n from pathlib import Path\n\n dir = Path(__file__).resolve().parent\n return {\"src\": dir / \"rstudio-logo.png\", \"width\": \"150px\"}\n\n @observe()\n def _():\n btn = s.input[\"btn\"]\n if btn and btn > 0:\n modal_show(modal(\"Hello there!\", easy_close=True))\n\n @observe()\n def _():\n link = s.input[\"link\"]\n if link and link > 0:\n notification_show(\"A notification!\")\n p = Progress()\n import time\n\n for i in range(30):\n p.set(i / 30, message=\"Computing\")\n time.sleep(0.1)\n p.close()\n\n\napp = ShinyApp(ui, server)\nif __name__ == \"__main__\":\n app.run()\n\nexamples/dynamic_ui/app.py METASEP\n# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = 
os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\n# For plot rendering\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nui = page_fluid(\n layout_sidebar(\n panel_sidebar(h2(\"Dynamic UI\"), output_ui(\"ui\")),\n panel_main(\n output_text_verbatim(\"txt\"),\n output_plot(\"plot\"),\n ),\n ),\n)\n\n\ndef server(session: ShinySession):\n @reactive()\n def r():\n if session.input[\"n\"] is None:\n return\n return session.input[\"n\"] * 2\n\n @session.output(\"txt\")\n async def _():\n val = r()\n return f\"n*2 is {val}, session id is {get_current_session().id}\"\n\n @session.output(\"plot\")\n @render_plot(alt=\"A histogram\")\n def _():\n np.random.seed(19680801)\n x = 100 + 15 * np.random.randn(437)\n\n fig, ax = plt.subplots()\n ax.hist(x, session.input[\"n\"], density=True)\n return fig\n\n @session.output(\"ui\")\n @render_ui()\n def _():\n return input_slider(\"n\", \"N\", 0, 100, 20)\n\n\napp = ShinyApp(ui, server)\n\nif __name__ == \"__main__\":\n app.run()\n # Alternately, to listen on a TCP port:\n # app.run(conn_type = \"tcp\")\n\nsetup.py METASEP\n#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\nfrom setuptools import setup, find_packages\n\nrequirements = []\n\ntest_requirements = [\n \"pytest>=3\",\n]\n\nsetup(\n author=\"Winston Chang\",\n author_email=\"winston@rstudio.com\",\n python_requires=\">=3.7\",\n classifiers=[\n \"Development Status :: 2 - Pre-Alpha\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: GNU General Public License v3 (GPLv3)\",\n \"Natural Language :: English\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n ],\n description=\"A web development framework for Python.\",\n install_requires=requirements,\n license=\"GNU General Public License v3\",\n include_package_data=True,\n keywords=\"shiny\",\n name=\"shiny\",\n packages=find_packages(include=[\"shiny\", \"shiny.*\"]),\n package_data={\n \"shiny\": [\"py.typed\"],\n },\n test_suite=\"tests\",\n tests_require=test_requirements,\n url=\"https://github.com/rstudio/prism\",\n version=\"0.0.0.9000\",\n zip_safe=False,\n)\n\nexamples/update-input/app.py METASEP\n# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()
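For reference, a minimal sketch (not part of the repo) of the three `choices` shapes that input_select() accepts, per the comment above _normalize_choices() in shiny/input_select.py, and the normalized form each one becomes; the variable names below are illustrative only.

# 1. A plain list: each item is used as both label and value.
choices_list = ["a", "b", "c"]
# ...which _normalize_choices() turns into:
normalized_list = {"a": "a", "b": "b", "c": "c"}

# 2. A flat dict: keys are the labels shown, values are the option values.
choices_flat = {"Choice A": "a", "Choice B": "b", "Choice C": "c"}

# 3. Nested dicts or lists become optgroups; sub-lists are expanded as in case 1.
choices_grouped = {
    "Choice A": "a",
    "Group B": {"Choice B1": "b1", "Choice B2": "b2"},
    "Group C": ["c1", "c2"],  # normalizes to {"c1": "c1", "c2": "c2"}
}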
max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, 
value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n\n # Date input ===============================================\n # Only label and value can be set for date input\n await update_date(\"inDate\", label=\"Date \" + c_label, value=date(2013, 4, c_num))\n\n # Date range input =========================================\n # Only label and value can be set for date range input\n await update_date_range(\n \"inDateRange\",\n label=\"Date range \" + c_label,\n start=date(2013, 1, c_num),\n end=date(2013, 12, c_num),\n min=date(2001, 1, c_num),\n max=date(2030, 1, c_num),\n )\n\n # # Checkbox ===============================================\n await update_checkbox(\"inCheckbox\", value=c_num % 2)\n\n # Checkbox group ===========================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n opts = zip(\n [f\"option label {c_num} {type}\" for type in [\"A\", \"B\"]],\n [f\"option-{c_num}-{type}\" for type in [\"A\", \"B\"]],\n )\n\n # Set the label, choices, and selected item\n await 
update_checkbox_group(\n \"inCheckboxGroup\",\n label=\"Checkbox group \" + c_label,\n choices=tuple(opts),\n selected=f\"option-{c_num}-A\",\n )\n\n # Radio group ==============================================\n await update_radio_buttons(\n \"inRadio\",\n label=\"Radio \" + c_label,\n choices=tuple(opts),\n selected=f\"option-{c_num}-A\",\n )\n # Select input =============================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n await update_select(\n \"inSelect\",\n label=\"Select \" + c_label,\n choices=dict(opts),\n selected=f\"option-{c_num}-A\",\n )\n\n # Can also set the label and select an item (or more than\n # one if it's a multi-select)\n await update_select(\n \"inSelect2\",\n label=\"Select label \" + c_label,\n choices=dict(opts),\n selected=f\"option-{c_num}-B\",\n )\n\n # Tabset input =============================================\n # Change the selected tab.\n # The tabsetPanel must have been created with an 'id' argument\n await nav_select(\"inTabset\", selected=\"panel2\" if c_num % 2 else \"panel1\")\n\n","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the 
prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n 
input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):","type":"inproject"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle 
quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n\n # Date input ===============================================\n # Only label and value can be set for date input\n await update_date(\"inDate\", label=\"Date \" + c_label, value=date(2013, 4, c_num))\n\n # Date range 
input =========================================\n # Only label and value can be set for date range input\n await update_date_range(\n \"inDateRange\",\n label=\"Date range \" + c_label,\n start=date(2013, 1, c_num),\n end=date(2013, 12, c_num),\n min=date(2001, 1, c_num),\n max=date(2030, 1, c_num),\n )\n\n # # Checkbox ===============================================\n await update_checkbox(\"inCheckbox\", value=c_num % 2)\n\n # Checkbox group ===========================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n opts = zip(\n [f\"option label {c_num} {type}\" for type in [\"A\", \"B\"]],\n [f\"option-{c_num}-{type}\" for type in [\"A\", \"B\"]],\n )\n\n # Set the label, choices, and selected item\n await update_checkbox_group(\n \"inCheckboxGroup\",\n label=\"Checkbox group \" + c_label,\n choices=tuple(opts),\n selected=f\"option-{c_num}-A\",\n )\n\n # Radio group ==============================================\n await update_radio_buttons(\n \"inRadio\",\n label=\"Radio \" + c_label,\n choices=tuple(opts),\n selected=f\"option-{c_num}-A\",\n )\n # Select input =============================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n await update_select(\n \"inSelect\",\n label=\"Select \" + c_label,\n choices=dict(opts),\n selected=f\"option-{c_num}-A\",\n )\n\n # Can also set the label and select an item (or more than\n # one if it's a multi-select)\n await update_select(\n \"inSelect2\",\n label=\"Select label \" + c_label,\n choices=dict(opts),\n selected=f\"option-{c_num}-B\",\n )\n\n # Tabset input =============================================\n # Change the selected tab.\n # The tabsetPanel must have been created with an 'id' argument\n await nav_select(\"inTabset\", selected=\"panel2\" if c_num % 2 else \"panel1\")\n\n\napp = ShinyApp(ui, server, debug=True)\n\nif __name__ == \"__main__\":","type":"common"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n 
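For comparison, here is a minimal sketch of the same server-driven update pattern, reduced to one controlling slider and one controlled numeric input. It assumes the same prototype-era API used in the example above (page_fluid, input_slider, input_numeric, observe_async, ShinySession, update_numeric, ShinyApp); current releases of Shiny for Python expose these names under different modules and decorators, so treat this as a sketch rather than the library's documented API.

# Minimal sketch (assumed prototype API, matching the example above):
# whenever the slider "n" changes, push its value into the numeric
# input "n_mirror" from the server.
from shiny import *

ui = page_fluid(
    input_slider("n", "Pick a value:", min=1, max=20, value=10),
    input_numeric("n_mirror", "Mirrors the slider:", min=1, max=20, value=10),
)


def server(sess: ShinySession):
    @observe_async()
    async def _():
        # Reading sess.input["n"] registers a reactive dependency, so this
        # observer re-runs on every slider change and sends an update
        # message to the client.
        await update_numeric("n_mirror", value=sess.input["n"])


app = ShinyApp(ui, server, debug=True)

if __name__ == "__main__":
    app.run()

The full example is essentially this pattern repeated once per input type, using the corresponding update_* function for each control.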
input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n\n # Date input ===============================================\n # Only label and value can be set for date input\n await update_date(\"inDate\", label=\"Date \" + c_label, value=date(2013, 4, c_num))\n\n # Date range input =========================================\n # Only label and value can be set for date range input\n await update_date_range(\n \"inDateRange\",\n label=\"Date range \" + c_label,","type":"common"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These 
inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )","type":"non_informative"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n 
),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n","type":"non_informative"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n 
tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================","type":"non_informative"},{"content":"# To run this app:","type":"non_informative"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5","type":"random"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module 
dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n\n # Date input ===============================================\n # Only label and value can be set for date input\n await 
update_date(\"inDate\", label=\"Date \" + c_label, value=date(2013, 4, c_num))\n\n # Date range input =========================================\n # Only label and value can be set for date range input\n await update_date_range(\n \"inDateRange\",\n label=\"Date range \" + c_label,\n start=date(2013, 1, c_num),\n end=date(2013, 12, c_num),\n min=date(2001, 1, c_num),\n max=date(2030, 1, c_num),\n )\n\n # # Checkbox ===============================================\n await update_checkbox(\"inCheckbox\", value=c_num % 2)\n\n # Checkbox group ===========================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n opts = zip(\n [f\"option label {c_num} {type}\" for type in [\"A\", \"B\"]],\n [f\"option-{c_num}-{type}\" for type in [\"A\", \"B\"]],\n )\n\n # Set the label, choices, and selected item","type":"random"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n 
# Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n\n # Date input ===============================================\n # Only label and value can be set for date input\n await update_date(\"inDate\", label=\"Date \" + c_label, value=date(2013, 4, c_num))\n\n # Date range input =========================================\n # Only label and value can be set for date range input\n await update_date_range(\n \"inDateRange\",\n label=\"Date range \" + c_label,\n start=date(2013, 1, c_num),\n end=date(2013, 12, c_num),\n min=date(2001, 1, c_num),\n max=date(2030, 1, c_num),\n )\n\n # # Checkbox ===============================================\n await update_checkbox(\"inCheckbox\", value=c_num % 2)\n\n # Checkbox group ===========================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n opts = zip(","type":"random"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(","type":"random"},{"content":"# To run this app:\n# python3 app.py\n\n# Then point web browser to:\n# http://localhost:8000/\n\n# Add parent directory to path, so we can find the prism module.\n# (This is just a temporary fix)\nimport os\nimport sys\n\n# This will load the shiny module dynamically, without having to install it.\n# This makes the debug/run cycle quicker.\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nsys.path.insert(0, shiny_module_dir)\n\nfrom shiny import *\n\nui = page_fluid(\n panel_title(\"Changing the values of inputs from the server\"),\n row(\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs control the other inputs on the 
page\"),\n input_text(\n \"control_label\", \"This controls some of the labels:\", \"LABEL TEXT\"\n ),\n input_slider(\n \"control_num\", \"This controls values:\", min=1, max=20, value=15\n ),\n ),\n ),\n column(\n 3,\n panel_well(\n tags.h4(\"These inputs are controlled by the other inputs\"),\n input_text(\"inText\", \"Text input:\", value=\"start text\"),\n input_numeric(\n \"inNumber\", \"Number input:\", min=1, max=20, value=5, step=0.5\n ),\n input_numeric(\n \"inNumber2\", \"Number input 2:\", min=1, max=20, value=5, step=0.5\n ),\n input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),\n input_slider(\n \"inSlider2\", \"Slider input 2:\", min=1, max=20, value=(5, 15)\n ),\n input_slider(\n \"inSlider3\", \"Slider input 3:\", min=1, max=20, value=(5, 15)\n ),\n input_date(\"inDate\", \"Date input:\"),\n input_date_range(\"inDateRange\", \"Date range input:\"),\n ),\n ),\n column(\n 3,\n panel_well(\n input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),\n input_checkbox_group(\n \"inCheckboxGroup\",\n \"Checkbox group input:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_radio_buttons(\n \"inRadio\",\n \"Radio buttons:\",\n (\"label 1\", \"option1\", \"label 2\", \"option2\"),\n ),\n input_select(\n \"inSelect\",\n \"Select input:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n ),\n input_select(\n \"inSelect2\",\n \"Select input 2:\",\n {\"label 1\": \"option1\", \"label 2\": \"option2\"},\n multiple=True,\n ),\n ),\n navs_tab(\n nav(\"panel1\", h2(\"This is the first panel.\")),\n nav(\"panel2\", h2(\"This is the second panel.\")),\n id=\"inTabset\",\n ),\n ),\n ),\n)\n\n\ndef server(sess: ShinySession):\n @observe_async()\n async def _():\n # We'll use these multiple times, so use short var names for\n # convenience.\n c_label = sess.input[\"control_label\"]\n c_num = sess.input[\"control_num\"]\n\n print(c_label)\n print(c_num)\n\n # Text =====================================================\n # Change both the label and the text\n await update_text(\n \"inText\",\n label=\"New \" + c_label,\n value=\"New text \" + str(c_num),\n )\n\n # Number ===================================================\n # Change the value\n await update_numeric(\"inNumber\", value=c_num)\n\n # Change the label, value, min, and max\n await update_numeric(\n \"inNumber2\",\n label=\"Number \" + c_label,\n value=c_num,\n min=c_num - 10,\n max=c_num + 10,\n step=5,\n )\n\n # Slider input =============================================\n # Only label and value can be set for slider\n await update_slider(\"inSlider\", label=\"Slider \" + c_label, value=c_num)\n\n # Slider range input =======================================\n # For sliders that pick out a range, pass in a vector of 2\n # values.\n await update_slider(\"inSlider2\", value=(c_num - 1, c_num + 1))\n\n # TODO: an NA means to not change that value (the low or high one)\n # await update_slider(\n # \"inSlider3\",\n # value=(NA, c_num+2)\n # )\n\n # Date input ===============================================\n # Only label and value can be set for date input\n await update_date(\"inDate\", label=\"Date \" + c_label, value=date(2013, 4, c_num))\n\n # Date range input =========================================\n # Only label and value can be set for date range input\n await update_date_range(\n \"inDateRange\",\n label=\"Date range \" + c_label,\n start=date(2013, 1, c_num),\n end=date(2013, 12, c_num),\n min=date(2001, 1, c_num),\n max=date(2030, 1, c_num),\n )\n\n # # Checkbox 
===============================================\n await update_checkbox(\"inCheckbox\", value=c_num % 2)\n\n # Checkbox group ===========================================\n # Create a list of new options, where the name of the items\n # is something like 'option label x A', and the values are\n # 'option-x-A'.\n opts = zip(\n [f\"option label {c_num} {type}\" for type in [\"A\", \"B\"]],\n [f\"option-{c_num}-{type}\" for type in [\"A\", \"B\"]],\n )\n\n # Set the label, choices, and selected item\n await update_checkbox_group(","type":"random"}],"string":"[\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\\n input_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n \\\"Checkbox group input:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_radio_buttons(\\n \\\"inRadio\\\",\\n \\\"Radio buttons:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_select(\\n \\\"inSelect\\\",\\n \\\"Select input:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n ),\\n input_select(\\n \\\"inSelect2\\\",\\n \\\"Select input 2:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n multiple=True,\\n ),\\n ),\\n navs_tab(\\n nav(\\\"panel1\\\", h2(\\\"This is the first panel.\\\")),\\n nav(\\\"panel2\\\", h2(\\\"This is the second panel.\\\")),\\n id=\\\"inTabset\\\",\\n ),\\n ),\\n ),\\n)\\n\\n\\ndef server(sess: ShinySession):\\n @observe_async()\",\n \"type\": \"infile\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport 
os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary 
fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle 
quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", 
min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, 
value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n 
\\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\\n input_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n \\\"Checkbox group input:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\\n input_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n \\\"Checkbox group input:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_radio_buttons(\\n \\\"inRadio\\\",\\n \\\"Radio buttons:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n 
input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\\n input_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n \\\"Checkbox group input:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_radio_buttons(\\n \\\"inRadio\\\",\\n \\\"Radio buttons:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_select(\\n \\\"inSelect\\\",\\n \\\"Select input:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n ),\\n input_select(\\n \\\"inSelect2\\\",\\n \\\"Select input 2:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n multiple=True,\\n ),\\n ),\\n navs_tab(\\n nav(\\\"panel1\\\", h2(\\\"This is the first panel.\\\")),\\n nav(\\\"panel2\\\", h2(\\\"This is the second panel.\\\")),\\n id=\\\"inTabset\\\",\\n ),\\n ),\\n ),\\n)\\n\\n\\ndef server(sess: ShinySession):\\n @observe_async()\\n async def _():\\n # We'll use these multiple times, so use short var names for\\n # convenience.\\n c_label = sess.input[\\\"control_label\\\"]\\n c_num = sess.input[\\\"control_num\\\"]\\n\\n print(c_label)\\n print(c_num)\\n\\n # Text =====================================================\\n # Change both the label and the text\\n await update_text(\\n \\\"inText\\\",\\n label=\\\"New \\\" + c_label,\\n value=\\\"New text \\\" + str(c_num),\\n )\\n\\n # Number ===================================================\\n # Change the value\\n await update_numeric(\\\"inNumber\\\", value=c_num)\\n\\n # Change the label, value, min, and max\\n await update_numeric(\\n \\\"inNumber2\\\",\\n label=\\\"Number \\\" + c_label,\\n value=c_num,\\n min=c_num - 10,\\n max=c_num + 10,\\n step=5,\\n )\\n\\n # Slider input =============================================\\n # Only label and value can be set for slider\\n await update_slider(\\\"inSlider\\\", label=\\\"Slider \\\" + c_label, value=c_num)\\n\\n # Slider range input =======================================\\n # For sliders that pick out a range, pass in a vector of 2\\n # values.\\n await update_slider(\\\"inSlider2\\\", value=(c_num - 1, c_num + 1))\\n\\n # TODO: an NA means to not change that value (the low or high one)\\n # await update_slider(\\n # \\\"inSlider3\\\",\\n # value=(NA, c_num+2)\\n # )\\n\\n # Date input ===============================================\\n # Only label and value can be set for date input\\n await update_date(\\\"inDate\\\", label=\\\"Date \\\" + c_label, 
value=date(2013, 4, c_num))\\n\\n # Date range input =========================================\\n # Only label and value can be set for date range input\\n await update_date_range(\\n \\\"inDateRange\\\",\\n label=\\\"Date range \\\" + c_label,\\n start=date(2013, 1, c_num),\\n end=date(2013, 12, c_num),\\n min=date(2001, 1, c_num),\\n max=date(2030, 1, c_num),\\n )\\n\\n # # Checkbox ===============================================\\n await update_checkbox(\\\"inCheckbox\\\", value=c_num % 2)\\n\\n # Checkbox group ===========================================\\n # Create a list of new options, where the name of the items\\n # is something like 'option label x A', and the values are\\n # 'option-x-A'.\\n opts = zip(\\n [f\\\"option label {c_num} {type}\\\" for type in [\\\"A\\\", \\\"B\\\"]],\\n [f\\\"option-{c_num}-{type}\\\" for type in [\\\"A\\\", \\\"B\\\"]],\\n )\\n\\n # Set the label, choices, and selected item\\n await update_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n label=\\\"Checkbox group \\\" + c_label,\\n choices=tuple(opts),\\n selected=f\\\"option-{c_num}-A\\\",\\n )\\n\\n # Radio group ==============================================\\n await update_radio_buttons(\\n \\\"inRadio\\\",\\n label=\\\"Radio \\\" + c_label,\\n choices=tuple(opts),\\n selected=f\\\"option-{c_num}-A\\\",\\n )\\n # Select input =============================================\\n # Create a list of new options, where the name of the items\\n # is something like 'option label x A', and the values are\\n # 'option-x-A'.\\n await update_select(\\n \\\"inSelect\\\",\\n label=\\\"Select \\\" + c_label,\\n choices=dict(opts),\\n selected=f\\\"option-{c_num}-A\\\",\\n )\\n\\n # Can also set the label and select an item (or more than\\n # one if it's a multi-select)\\n await update_select(\\n \\\"inSelect2\\\",\\n label=\\\"Select label \\\" + c_label,\\n choices=dict(opts),\\n selected=f\\\"option-{c_num}-B\\\",\\n )\\n\\n # Tabset input =============================================\\n # Change the selected tab.\\n # The tabsetPanel must have been created with an 'id' argument\\n await nav_select(\\\"inTabset\\\", selected=\\\"panel2\\\" if c_num % 2 else \\\"panel1\\\")\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, 
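The full example exercises every update_* function at once, but the underlying pattern is small: an async observer reads the controlling input and pushes a new value to the controlled one. A minimal sketch of that pattern, assuming the same prototype API used above (page_fluid, ShinySession, observe_async, update_text, ShinyApp; the app.run() entry point is likewise assumed):

from shiny import *

ui = page_fluid(
    input_slider("n", "Controlling slider:", min=1, max=20, value=5),
    input_text("txt", "Controlled text:", value="start"),
)


def server(sess: ShinySession):
    # Re-runs whenever an input it reads (here: "n") changes.
    @observe_async()
    async def _():
        n = sess.input["n"]
        # Push a new value to the text input on the client.
        await update_text("txt", value=f"Slider is at {n}")


app = ShinyApp(ui, server, debug=True)

if __name__ == "__main__":
    app.run()  # assumed, by analogy with the truncated original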
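For a concrete sense of what the checkbox-group, radio, and select updates receive, this small illustrative snippet (not part of the original file) shows what opts expands to when control_num is 3:

c_num = 3
opts = list(
    zip(
        [f"option label {c_num} {opt}" for opt in ["A", "B"]],
        [f"option-{c_num}-{opt}" for opt in ["A", "B"]],
    )
)

print(tuple(opts))
# (('option label 3 A', 'option-3-A'), ('option label 3 B', 'option-3-B'))

print(dict(opts))
# {'option label 3 A': 'option-3-A', 'option label 3 B': 'option-3-B'}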
),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\\n input_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n \\\"Checkbox group input:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_radio_buttons(\\n \\\"inRadio\\\",\\n \\\"Radio buttons:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_select(\\n \\\"inSelect\\\",\\n \\\"Select input:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n ),\\n input_select(\\n \\\"inSelect2\\\",\\n \\\"Select input 2:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n multiple=True,\\n ),\\n ),\\n navs_tab(\\n nav(\\\"panel1\\\", h2(\\\"This is the first panel.\\\")),\\n nav(\\\"panel2\\\", h2(\\\"This is the second panel.\\\")),\\n id=\\\"inTabset\\\",\\n ),\\n ),\\n ),\\n)\\n\\n\\ndef server(sess: ShinySession):\\n @observe_async()\\n async def _():\\n # We'll use these multiple times, so use short var names for\\n # convenience.\\n c_label = sess.input[\\\"control_label\\\"]\\n c_num = sess.input[\\\"control_num\\\"]\\n\\n print(c_label)\\n print(c_num)\\n\\n # Text =====================================================\\n # Change both the label and the text\\n await update_text(\\n \\\"inText\\\",\\n label=\\\"New \\\" + c_label,\\n value=\\\"New text \\\" + str(c_num),\\n )\\n\\n # Number ===================================================\\n # Change the value\\n await update_numeric(\\\"inNumber\\\", value=c_num)\\n\\n # Change the label, value, min, and max\\n await update_numeric(\\n \\\"inNumber2\\\",\\n label=\\\"Number \\\" + c_label,\\n value=c_num,\\n min=c_num - 10,\\n max=c_num + 10,\\n step=5,\\n )\\n\\n # Slider input =============================================\\n # Only label and value can be set for slider\\n await update_slider(\\\"inSlider\\\", label=\\\"Slider \\\" + c_label, value=c_num)\\n\\n # Slider range input =======================================\\n # For sliders that pick out a range, pass in a vector of 2\\n # values.\\n await update_slider(\\\"inSlider2\\\", value=(c_num - 1, c_num + 1))\\n\\n # TODO: an NA means to not change that value (the low or high one)\\n # await update_slider(\\n # \\\"inSlider3\\\",\\n # value=(NA, c_num+2)\\n # )\\n\\n # Date input ===============================================\\n # Only label and value can be set for date input\\n await update_date(\\\"inDate\\\", label=\\\"Date \\\" + c_label, value=date(2013, 4, c_num))\\n\\n # Date range input =========================================\\n # Only label and value can be set for date range input\\n await update_date_range(\\n 
\\\"inDateRange\\\",\\n label=\\\"Date range \\\" + c_label,\\n start=date(2013, 1, c_num),\\n end=date(2013, 12, c_num),\\n min=date(2001, 1, c_num),\\n max=date(2030, 1, c_num),\\n )\\n\\n # # Checkbox ===============================================\\n await update_checkbox(\\\"inCheckbox\\\", value=c_num % 2)\\n\\n # Checkbox group ===========================================\\n # Create a list of new options, where the name of the items\\n # is something like 'option label x A', and the values are\\n # 'option-x-A'.\\n opts = zip(\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# To run this app:\\n# python3 app.py\\n\\n# Then point web browser to:\\n# http://localhost:8000/\\n\\n# Add parent directory to path, so we can find the prism module.\\n# (This is just a temporary fix)\\nimport os\\nimport sys\\n\\n# This will load the shiny module dynamically, without having to install it.\\n# This makes the debug/run cycle quicker.\\nshiny_module_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\\nsys.path.insert(0, shiny_module_dir)\\n\\nfrom shiny import *\\n\\nui = page_fluid(\\n panel_title(\\\"Changing the values of inputs from the server\\\"),\\n row(\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs control the other inputs on the page\\\"),\\n input_text(\\n \\\"control_label\\\", \\\"This controls some of the labels:\\\", \\\"LABEL TEXT\\\"\\n ),\\n input_slider(\\n \\\"control_num\\\", \\\"This controls values:\\\", min=1, max=20, value=15\\n ),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\\n input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\\n input_numeric(\\n \\\"inNumber\\\", \\\"Number input:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_numeric(\\n \\\"inNumber2\\\", \\\"Number input 2:\\\", min=1, max=20, value=5, step=0.5\\n ),\\n input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\\n input_slider(\\n \\\"inSlider2\\\", \\\"Slider input 2:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_slider(\\n \\\"inSlider3\\\", \\\"Slider input 3:\\\", min=1, max=20, value=(5, 15)\\n ),\\n input_date(\\\"inDate\\\", \\\"Date input:\\\"),\\n input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\\n ),\\n ),\\n column(\\n 3,\\n panel_well(\\n input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\\n input_checkbox_group(\\n \\\"inCheckboxGroup\\\",\\n \\\"Checkbox group input:\\\",\\n (\\\"label 1\\\", 
\\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_radio_buttons(\\n \\\"inRadio\\\",\\n \\\"Radio buttons:\\\",\\n (\\\"label 1\\\", \\\"option1\\\", \\\"label 2\\\", \\\"option2\\\"),\\n ),\\n input_select(\\n \\\"inSelect\\\",\\n \\\"Select input:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n ),\\n input_select(\\n \\\"inSelect2\\\",\\n \\\"Select input 2:\\\",\\n {\\\"label 1\\\": \\\"option1\\\", \\\"label 2\\\": \\\"option2\\\"},\\n multiple=True,\\n ),\\n ),\\n navs_tab(\\n nav(\\\"panel1\\\", h2(\\\"This is the first panel.\\\")),\\n nav(\\\"panel2\\\", h2(\\\"This is the second panel.\\\")),\\n id=\\\"inTabset\\\",\\n ),\\n ),\\n ),\\n)\\n\\n\\ndef server(sess: ShinySession):\\n @observe_async()\\n async def _():\\n # We'll use these multiple times, so use short var names for\\n # convenience.\\n c_label = sess.input[\\\"control_label\\\"]\\n c_num = sess.input[\\\"control_num\\\"]\\n\\n print(c_label)\\n print(c_num)\\n\\n # Text =====================================================\\n # Change both the label and the text\\n await update_text(\\n \\\"inText\\\",\\n label=\\\"New \\\" + c_label,\\n value=\\\"New text \\\" + str(c_num),\\n )\\n\\n # Number ===================================================\\n # Change the value\\n await update_numeric(\\\"inNumber\\\", value=c_num)\\n\\n # Change the label, value, min, and max\\n await update_numeric(\\n \\\"inNumber2\\\",\\n label=\\\"Number \\\" + c_label,\\n value=c_num,\\n min=c_num - 10,\\n max=c_num + 10,\\n step=5,\\n )\\n\\n # Slider input =============================================\\n # Only label and value can be set for slider\\n await update_slider(\\\"inSlider\\\", label=\\\"Slider \\\" + c_label, value=c_num)\\n\\n # Slider range input =======================================\\n # For sliders that pick out a range, pass in a vector of 2\\n # values.\\n await update_slider(\\\"inSlider2\\\", value=(c_num - 1, c_num + 1))\\n\\n # TODO: an NA means to not change that value (the low or high one)\\n # await update_slider(\\n # \\\"inSlider3\\\",\\n # value=(NA, c_num+2)\\n # )\\n\\n # Date input ===============================================\\n # Only label and value can be set for date input\\n await update_date(\\\"inDate\\\", label=\\\"Date \\\" + c_label, value=date(2013, 4, c_num))\\n\\n # Date range input =========================================\\n # Only label and value can be set for date range input\\n await update_date_range(\\n \\\"inDateRange\\\",\\n label=\\\"Date range \\\" + c_label,\\n start=date(2013, 1, c_num),\\n end=date(2013, 12, c_num),\\n min=date(2001, 1, c_num),\\n max=date(2030, 1, c_num),\\n )\\n\\n # # Checkbox ===============================================\\n await update_checkbox(\\\"inCheckbox\\\", value=c_num % 2)\\n\\n # Checkbox group ===========================================\\n # Create a list of new options, where the name of the items\\n # is something like 'option label x A', and the values are\\n # 'option-x-A'.\\n opts = zip(\\n [f\\\"option label {c_num} {type}\\\" for type in [\\\"A\\\", \\\"B\\\"]],\\n [f\\\"option-{c_num}-{type}\\\" for type in [\\\"A\\\", \\\"B\\\"]],\\n )\\n\\n # Set the label, choices, and selected item\\n await update_checkbox_group(\",\n \"type\": \"random\"\n }\n]"},"gt":{"kind":"list like","value":[" async def _():","ui = page_fluid("," panel_title(\"Changing the values of inputs from the server\"),"," row("," column("," panel_well("," input_text("," input_slider("," input_text(\"inText\", \"Text 
input:\", value=\"start text\"),"," input_numeric("," input_slider(\"inSlider\", \"Slider input:\", min=1, max=20, value=15),"," input_date(\"inDate\", \"Date input:\"),"," input_date_range(\"inDateRange\", \"Date range input:\"),"," input_checkbox(\"inCheckbox\", \"Checkbox input\", value=False),"," input_checkbox_group("," input_radio_buttons("," input_select(","app = ShinyApp(ui, server, debug=True)"," navs_tab("," nav(\"panel1\", h2(\"This is the first panel.\")),"," nav(\"panel2\", h2(\"This is the second panel.\")),"," @observe_async()"," app.run()"," start=date(2013, 1, c_num),",""," # Date input ==============================================="," # Change both the label and the text","# python3 app.py"," ),"," await update_checkbox_group("," [f\"option label {c_num} {type}\" for type in [\"A\", \"B\"]],"," tags.h4(\"These inputs are controlled by the other inputs\"),"," \"inCheckboxGroup\","],"string":"[\n \" async def _():\",\n \"ui = page_fluid(\",\n \" panel_title(\\\"Changing the values of inputs from the server\\\"),\",\n \" row(\",\n \" column(\",\n \" panel_well(\",\n \" input_text(\",\n \" input_slider(\",\n \" input_text(\\\"inText\\\", \\\"Text input:\\\", value=\\\"start text\\\"),\",\n \" input_numeric(\",\n \" input_slider(\\\"inSlider\\\", \\\"Slider input:\\\", min=1, max=20, value=15),\",\n \" input_date(\\\"inDate\\\", \\\"Date input:\\\"),\",\n \" input_date_range(\\\"inDateRange\\\", \\\"Date range input:\\\"),\",\n \" input_checkbox(\\\"inCheckbox\\\", \\\"Checkbox input\\\", value=False),\",\n \" input_checkbox_group(\",\n \" input_radio_buttons(\",\n \" input_select(\",\n \"app = ShinyApp(ui, server, debug=True)\",\n \" navs_tab(\",\n \" nav(\\\"panel1\\\", h2(\\\"This is the first panel.\\\")),\",\n \" nav(\\\"panel2\\\", h2(\\\"This is the second panel.\\\")),\",\n \" @observe_async()\",\n \" app.run()\",\n \" start=date(2013, 1, c_num),\",\n \"\",\n \" # Date input ===============================================\",\n \" # Change both the label and the text\",\n \"# python3 app.py\",\n \" ),\",\n \" await update_checkbox_group(\",\n \" [f\\\"option label {c_num} {type}\\\" for type in [\\\"A\\\", \\\"B\\\"]],\",\n \" tags.h4(\\\"These inputs are controlled by the other inputs\\\"),\",\n \" \\\"inCheckboxGroup\\\",\"\n]"},"metainfo_separator":{"kind":"string","value":" METASEP\n"}}},{"rowIdx":204,"cells":{"repo_id":{"kind":"number","value":56,"string":"56"},"repo_name":{"kind":"string","value":"azure__review-checklists"},"project_context":{"kind":"string","value":"azure__review-checklists METASEP\n\nweb/flaskmysql/app.py METASEP\n#app.py\nfrom flask import Flask, request, render_template, jsonify\nfrom flaskext.mysql import MySQL #pip install flask-mysql\nimport pymysql\nimport os\n \napp = Flask(__name__)\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"ERROR: Please define an environment 
variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\n\n# Open connection\nmysql = MySQL()\napp.config['MYSQL_DATABASE_USER'] = mysql_server_username\napp.config['MYSQL_DATABASE_PASSWORD'] = mysql_server_password\napp.config['MYSQL_DATABASE_DB'] = 'checklist'\napp.config['MYSQL_DATABASE_HOST'] = mysql_server_fqdn\nmysql.init_app(app)\n \n@app.route('/')\ndef home():\n app.logger.info(\"DEBUG: Connecting to database...\") \n try:\n category_filter = request.args.get('category', None)\n status_filter = request.args.get('status', None)\n severity_filter = request.args.get('severity', None)\n except Exception as e:\n app.logger.info(\"ERROR reading query parameters for filters: {0}\".format(str(e)))\n pass\n try:\n conn = mysql.connect()\n cursor = conn.cursor(pymysql.cursors.DictCursor)\n except Exception as e:\n app.logger.info(\"ERROR opening cursor to DB connection: {0}\".format(str(e)))\n return jsonify(str(e))\n try:\n sqlquery = \"SELECT * from items\"\n filter1_added = False\n # category filter\n if category_filter:\n if filter1_added:\n sqlquery += \" AND \"\n else:\n sqlquery += \" WHERE \"\n filter1_added = True\n sqlquery += \"category = '{0}'\".format(category_filter)\n # status filter\n if status_filter:\n if filter1_added:\n sqlquery += \" AND \"\n else:\n sqlquery += \" WHERE \"\n filter1_added = True\n sqlquery += \"status = '{0}'\".format(status_filter)\n # severity filter\n if severity_filter:\n if filter1_added:\n sqlquery += \" AND \"\n else:\n sqlquery += \" WHERE \"\n filter1_added = True\n sqlquery += \"severity = '{0}'\".format(severity_filter)\n # send queries\n app.logger.info (\"Retrieving checklist items with query '{0}'\".format(sqlquery))\n cursor.execute(sqlquery)\n itemslist = cursor.fetchall()\n cursor.execute(\"SELECT DISTINCT category FROM items\")\n categorylist = cursor.fetchall()\n cursor.execute(\"SELECT DISTINCT severity FROM items\")\n severitylist = cursor.fetchall()\n cursor.execute(\"SELECT DISTINCT status FROM items\")\n statuslist = cursor.fetchall()\n return render_template('index.html', itemslist=itemslist, categorylist=categorylist, severitylist=severitylist, statuslist=statuslist)\n except Exception as e:\n app.logger.info(\"ERROR sending query: {0}\".format(str(e)))\n return jsonify(str(e))\n \n@app.route(\"/update\",methods=[\"POST\",\"GET\"])\ndef update():\n app.logger.info(\"Processing {0} with request.form {1}\".format(str(request.method), str(request.form))) \n try:\n conn = mysql.connect()\n cursor = conn.cursor(pymysql.cursors.DictCursor)\n if request.method == 'POST':\n field = request.form['field'] \n value = request.form['value']\n editid = request.form['id']\n app.logger.info(\"Processing POST for field '{0}', editid '{1}' and value '{2}'\".format(field, value, editid)) \n \n if field == 'comment' and value != '':\n sql = \"UPDATE items SET comments=%s WHERE guid=%s\"\n data = (value, editid)\n conn = mysql.connect()\n cursor = conn.cursor()\n app.logger.info (\"Sending SQL query '{0}' with data '{1}'\".format(sql, str(data)))\n cursor.execute(sql, data)\n conn.commit()\n elif field == 'status' and value != '':\n sql = \"UPDATE items SET status=%s WHERE guid=%s\"\n data = (value, editid)\n conn = mysql.connect()\n cursor = conn.cursor()\n app.logger.info (\"Sending SQL query '{0}' with data '{1}'\".format(sql, str(data)))\n cursor.execute(sql, data)\n conn.commit()\n else:\n app.logger.info (\"Field is '{0}', value is '{1}': not doing anything\".format(field, value))\n success = 1\n return 
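# --- Illustrative sketch (added for clarity, not part of the original app.py) ---
# The home() route above assembles its WHERE clause by formatting the query-string
# values directly into the SQL text. A minimal alternative, assuming the same
# pymysql cursor, is to collect placeholders and let the driver bind the values.
# The helper name build_items_query is hypothetical:
def build_items_query(category=None, status=None, severity=None):
    """Return (sql, params) for selecting checklist items with optional filters."""
    clauses = []
    params = []
    for column, value in (("category", category), ("status", status), ("severity", severity)):
        if value:
            clauses.append(column + " = %s")
            params.append(value)
    sql = "SELECT * FROM items"
    if clauses:
        sql += " WHERE " + " AND ".join(clauses)
    return sql, params

# Example usage (hypothetical): cursor.execute(*build_items_query(category=category_filter))
# --------------------------------------------------------------------------------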
jsonify(success)\n except Exception as e:\n app.logger.info(\"Oh oh, there is an error: {0}\".format(str(e)))\n success = 0\n return jsonify(success)\n finally:\n cursor.close() \n conn.close()\n \nif __name__ == \"__main__\":\n app.run(host='0.0.0.0', debug=True)\n\n\nweb/fillgraphdb/graph_db.py METASEP\nimport os\nimport sys\nimport pymysql\nimport json\nimport time\nimport requests\nimport azure.mgmt.resourcegraph as arg\nfrom datetime import datetime\nfrom azure.mgmt.resource import SubscriptionClient\nfrom azure.identity import AzureCliCredential\nfrom azure.identity import DefaultAzureCredential\nfrom azure.identity import ClientSecretCredential\n\n# Database and table name\nmysql_db_name = \"checklist\"\nmysql_db_table = \"items\"\nuse_ssl = \"yes\"\n\n# Format a string to be included in a SQL query as value\ndef escape_quotes (this_value):\n return str(this_value).replace(\"'\", \"\\\\'\")\n\n# Function to send an Azure Resource Graph query\ndef get_resources (graph_query, argClient, subsList, argQueryOptions):\n # TO DO: Authentication should probably happen outside of this function\n try:\n # Create query\n argQuery = arg.models.QueryRequest(subscriptions=subsList, query=graph_query, options=argQueryOptions)\n # Run query and return results\n argResults = argClient.resources(argQuery)\n print(\"DEBUG: query results: {0}\".format(str(argResults)))\n return argResults\n except Exception as e:\n print(\"ERROR: Error sending Azure Resource Graph query to Azure: {0}\".format(str(e)))\n # sys.exit(0) # Debugging.... Probably this should be exit(1)\n return ''\n\n# Wait for IMDS endpoint to be available\ntry:\n wait_max_intervals = int(os.environ.get(\"WAIT_INTERVALS\"))\n print (\"DEBUG: WAIT_INTERVALS read from environment variable: {0}\".format(str(wait_max_intervals)))\nexcept:\n wait_max_intervals = 5\n print (\"DEBUG: WAIT_INTERVALS set to default value: {0}\".format(str(wait_max_intervals)))\nwait_interval = 10.0\nimds_url = 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/'\nimds_headers = {\n \"Metadata\" : \"true\"\n}\nimds_tries = 0\nbreak_loop = False\nprint ('DEBUG: Going into waiting loop to make sure the metadata endpoint is active...')\nwhile not break_loop:\n imds_tries += 1\n print (\"DEBUG: We are in the loop, pass {0}/{1} ({2}). 
Trying the IMDS endpoint...\".format(str(imds_tries), str(wait_max_intervals), str(datetime.now())))\n if imds_tries > wait_max_intervals:\n print(\"ERROR: max wait intervals exceeded when waiting for IMDS to answer, hopefully you specified some SP credentials as SP variables...\")\n break_loop = True\n else:\n print (\"DEBUG: Sending GET request to {0}...\".format(imds_url))\n try:\n imds_response = requests.get(imds_url, headers=imds_headers, timeout=1)\n if imds_response.status_code >= 200 and imds_response.status_code <= 299:\n print (\"DEBUG: IMDS endpoint seems to be working, received status code {0} and answer {1}\".format(str(imds_response.status_code), str(imds_response.text)))\n break_loop = True\n else:\n print (\"DEBUG: IMDS endpoint doesnt seem to be working, received status code {0} and answer {1}\".format(str(imds_response.status_code), str(imds_response.text)))\n except Exception as e:\n print(\"DEBUG: Error sending request to IMDS endpoint: {0}\".format(str(e)))\n pass\n if not break_loop:\n print(\"DEBUG: Going to sleep {0} seconds before next try...\".format(str(wait_interval)))\n time.sleep (wait_interval)\n\n# Authenticate to Azure, either with Managed Identity or SP\nprint('DEBUG: Authenticating to Azure...')\ntry:\n print('DEBUG: Getting environment variables...')\n # credential = AzureCliCredential() # Get your credentials from Azure CLI (development only!) and get your subscription list\n tenant_id = os.environ.get(\"AZURE_TENANT_ID\")\n client_id = os.environ.get(\"AZURE_CLIENT_ID\")\n client_secret = os.environ.get(\"AZURE_CLIENT_SECRET\")\nexcept Exception as e:\n print(\"ERROR: Error getting environment variables: {0}\".format(str(e)))\n tenant_id = None\n client_id = None\n client_secret = None\n pass \ntry:\n if tenant_id and client_id and client_secret:\n print(\"DEBUG: Service principal credentials (client ID {0}, tenant ID {1}) retrieved from environment variables, trying SP-based authentication now...\".format(str(client_id), str(tenant_id)))\n credential = ClientSecretCredential(tenant_id=tenant_id, client_id=client_id, client_secret=client_secret)\n else:\n print('DEBUG: Service principal credentials could not be retrieved from environment variables, trying default authentication method with Managed Identity...')\n credential = DefaultAzureCredential() # Managed identity\nexcept Exception as e:\n print(\"ERROR: Error during Azure Authentication: {0}\".format(str(e)))\n sys.exit(1)\ntry:\n print('DEBUG: Getting subscriptions...')\n subsClient = SubscriptionClient(credential)\n subsRaw = []\n for sub in subsClient.subscriptions.list():\n subsRaw.append(sub.as_dict())\n subsList = []\n for sub in subsRaw:\n subsList.append(sub.get('subscription_id'))\n print (\"DEBUG: provided credentials give access to {0} subscription(s)\".format(str(len(subsList))))\n # Create Azure Resource Graph client and set options\n print('DEBUG: Creating client object...')\n argClient = arg.ResourceGraphClient(credential)\n argQueryOptions = arg.models.QueryRequestOptions(result_format=\"objectArray\")\nexcept Exception as e:\n print(\"ERROR: Error creating resource graph client object: {0}\".format(str(e)))\n sys.exit(1)\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nelse:\n print(\"DEBUG: mysql FQDN retrieved from environment variables: 
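# --- Summary note (added for clarity, not part of the original graph_db.py) -----
# This script is configured entirely through environment variables:
#   WAIT_INTERVALS                          - optional, number of IMDS retries (defaults to 5)
#   AZURE_TENANT_ID, AZURE_CLIENT_ID,
#   AZURE_CLIENT_SECRET                     - optional; if all three are set a service principal
#                                             is used, otherwise DefaultAzureCredential
#                                             (e.g. a managed identity) is tried
#   MYSQL_FQDN, MYSQL_USER, MYSQL_PASSWORD  - connection settings for the checklist database
# ---------------------------------------------------------------------------------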
'{0}'\".format(mysql_server_fqdn))\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nelse:\n print(\"DEBUG: mysql authentication username retrieved from environment variables: '{0}'\".format(mysql_server_username))\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\nelse:\n print(\"DEBUG: mysql authentication password retrieved from environment variables: {0}\".format(\"********\"))\n\n# Create connection to MySQL server and number of records\nprint (\"DEBUG: Connecting to '{0}' with username '{1}'...\".format(mysql_server_fqdn, mysql_server_username))\nif use_ssl == 'yes':\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password, ssl = {'ssl':{'ca': 'BaltimoreCyberTrustRoot.crt.pem'}})\nelse:\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password)\nsql_query = \"SELECT * FROM {0} WHERE graph_query_success IS NOT null AND graph_query_failure IS NOT null AND graph_query_success != 'None' AND graph_query_failure != 'None';\".format (mysql_db_table)\ncursor = db.cursor()\ncursor.execute(sql_query)\nrows = cursor.fetchall()\nrow_cnt = 0\nif len(rows) > 0:\n for row in rows:\n row_cnt += 1\n result_text = ''\n item_guid = row[0]\n item_success_query = row[10]\n item_failure_query = row[11]\n # print (\"DEBUG {0}: '{1}', '{2}'\".format(item_guid, item_success_query, item_failure_query))\n success_resources = str(get_resources(item_success_query, argClient, subsList, argQueryOptions)).replace(\"'\", '\"')\n success_resources = success_resources.replace(': None', ': \"None\"')\n # print (\"DEBUG: SUCCESS QUERY: {0}\".format(success_resources))\n if success_resources:\n try:\n success_resources_object = json.loads(success_resources)\n except:\n print(\"ERROR: JSON returned from Azure Graph Query not valid: {0}\".format(success_resources))\n for resource in success_resources_object['data']:\n if result_text: result_text += '\\n'\n result_text += \"SUCCESS: {0}\".format(resource[\"id\"])\n failure_resources = str(get_resources(item_failure_query, argClient, subsList, argQueryOptions)).replace(\"'\", '\"')\n failure_resources = failure_resources.replace(': None', ': \"None\"')\n # print (\"DEBUG: FAILURE QUERY: {0}\".format(failure_resources))\n if failure_resources:\n try:\n failure_resources_object = json.loads(failure_resources)\n except:\n print(\"ERROR: JSON returned from Azure Graph Query not valid: {0}\".format(failure_resources))\n for resource in failure_resources_object['data']:\n if result_text: result_text += '\\n'\n result_text += \"FAILURE: {0}\".format(resource[\"id\"])\n # print (\"DEBUG: Result summary: \\n{0}\".format(result_text))\n if result_text:\n update_query = \"UPDATE items SET graph_query_result = '{0}' WHERE guid = '{1}';\".format(result_text, item_guid)\n print (\"DEBUG: sending SQL query '{0}'\".format(update_query))\n try:\n cursor.execute(update_query)\n db.commit()\n except Exception as e:\n print(\"ERROR: Error sending SQL query to MySql 
server: {0}\".format(str(e)))\n pass\n else:\n print(\"DEBUG: No results could be retrieved for the success and failure queries of checklist item {0}\".format(item_guid))\nelse:\n row_count = 0\nprint (\"INFO: Processed table {0} in database {1} with {2} records with graph queries. Happy review!\".format(mysql_db_table, mysql_db_name, str(row_cnt)))\n\n# Bye\ndb.close()\nweb/filldb/fill_db.py METASEP\nimport requests\nimport json\nimport os\nimport sys\nimport pymysql\n\n# Database and table name\nmysql_db_name = \"checklist\"\nmysql_db_table = \"items\"\nuse_ssl = \"yes\"\n\n# Format a string to be included in a SQL query as value\ndef escape_quotes(this_value):\n return str(this_value).replace(\"'\", \"\\\\'\")\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\n\n# Create connection to MySQL server and get version\nprint (\"INFO: Connecting to {0} with username {1}...\".format(mysql_server_fqdn, mysql_server_username))\nif use_ssl == 'yes':\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, passwd = mysql_server_password, ssl = {'ssl':{'ca': 'BaltimoreCyberTrustRoot.crt.pem'}})\nelse:\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, passwd = mysql_server_password)\nsql_query = \"SELECT VERSION();\"\ncursor = db.cursor()\ncursor.execute(sql_query)\nrows = cursor.fetchall()\ndata = \"\"\nif len(rows) > 0:\n for row in rows:\n if len(data) > 0:\n data += ', '\n data += str(''.join(row))\nprint (\"INFO: Connected to MySQL server {0} with version {1}\".format(mysql_server_fqdn, data))\n\n# Delete db if existed\nsql_query = \"DROP DATABASE IF EXISTS {0};\".format(mysql_db_name)\n# print (\"Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\n\n# Create database\nsql_query = \"CREATE DATABASE IF NOT EXISTS {0};\".format(mysql_db_name)\n# print (\"Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\nsql_query = \"USE {0}\".format(mysql_db_name)\n# print (\"Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\n\n# Create table\nsql_query = \"\"\"CREATE TABLE {0} (\n guid varchar(40),\n text varchar(1024),\n description varchar(1024),\n link varchar(255),\n training varchar(255),\n comments varchar(1024),\n severity varchar(10),\n status varchar(15),\n category varchar(255),\n subcategory varchar(255),\n graph_query_success varchar(1024),\n graph_query_failure varchar(1024),\n graph_query_result varchar(4096)\n);\"\"\".format(mysql_db_table)\n# print (\"DEBUG: Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\n\n# Download checklist\ntechnology = os.environ.get(\"CHECKLIST_TECHNOLOGY\")\nif technology:\n checklist_url = 
\"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\nelse:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\nresponse = requests.get(checklist_url)\n\n# If download was successful\nif response.status_code == 200:\n print (\"INFO: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_object = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n # Get default status from the JSON, default to \"Not verified\"\n try:\n status_list = checklist_object.get(\"status\")\n default_status = status_list[0].get(\"name\")\n except:\n default_status = \"Not verified\"\n pass\n # For each checklist item, add a row to mysql DB\n row_counter = 0\n for item in checklist_object.get(\"items\"):\n guid = item.get(\"guid\")\n category = item.get(\"category\")\n subcategory = item.get(\"subcategory\")\n text = escape_quotes(item.get(\"text\"))\n description = escape_quotes(item.get(\"description\"))\n severity = item.get(\"severity\")\n link = item.get(\"link\")\n training = item.get(\"training\")\n status = default_status\n graph_query_success = escape_quotes(item.get(\"graph_success\"))\n graph_query_failure = escape_quotes(item.get(\"graph_failure\"))\n # print(\"DEBUG: Adding to table {0}: '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', '{8}', '{9}', '{10}'\".format(mysql_db_table, category, subcategory, text, description, severity, link, training, graph_query_success, graph_query_failure, guid))\n sql_query = \"\"\"INSERT INTO {0} (category,subcategory,text,description,severity,link,training,graph_query_success,graph_query_failure,guid,status) \n VALUES ('{1}','{2}','{3}','{4}','{5}', '{6}','{7}','{8}','{9}','{10}', '{11}');\"\"\".format(mysql_db_table, category, subcategory, text, description, severity, link, training, graph_query_success, graph_query_failure, guid, status)\n # print (\"DEBUG: Sending query: {0}\".format(sql_query))\n cursor.execute(sql_query)\n db.commit()\n row_counter += 1\nelse:\n print (\"Error downloading {0}\".format(checklist_url))\n\n# Bye\nprint(\"INFO: {0} rows added to database.\".format(str(row_counter)))\ndb.close()\nweb/filldb/check_db.py METASEP\nimport os\nimport sys\nimport pymysql\n\n# Database and table name\nmysql_db_name = \"checklist\"\nmysql_db_table = \"items\"\nuse_ssl = \"yes\"\n\n# Format a string to be included in a SQL query as value\ndef escape_quotes(this_value):\n return str(this_value).replace(\"'\", \"\\\\'\")\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\n\n# Create connection to MySQL server and number of records\nprint 
(\"Connecting to {0} with username {1}...\".format(mysql_server_fqdn, mysql_server_username))\nif use_ssl == 'yes':\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password, ssl = {'ssl':{'ca': 'BaltimoreCyberTrustRoot.crt.pem'}})\nelse:\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password)\nsql_query = \"SELECT COUNT(*) FROM {0};\".format (mysql_db_table)\ncursor = db.cursor()\ncursor.execute(sql_query)\nrows = cursor.fetchall()\nif len(rows) > 0:\n row_count = rows[0][0]\nelse:\n row_count = 0\nprint (\"Table {0} in database {1} contains {2} records\".format(mysql_db_table, mysql_db_name, str(row_count)))\n\n# Bye\ndb.close()\nscripts/update_excel_xlwings.py METASEP\n######################################################################\n#\n# This script reads the checklist items from the latest checklist file\n# in Github (or from a local file) and populates an Excel spreadsheet\n# with the contents.\n# \n# Last updated: March 2022\n#\n######################################################################\n\nimport json\nimport argparse\nimport sys\nimport os\nimport requests\nimport xlwings as xw\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\nparser.add_argument('--technology', dest='technology', action='store',\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\nparser.add_argument('--excel-file', dest='excel_file', action='store',\n help='You need to supply an Excel file where the checklist will be written')\nparser.add_argument('--app-mode', dest='appmode', action='store_true',\n default=False,\n help='Open Excel workbook in App mode, not great for systems without Excel installed (default: False)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\nchecklist_file = args.checklist_file\nexcel_file = args.excel_file\ntechnology = args.technology\n\n# Constants\nworksheet_checklist_name = 'Checklist'\nrow1 = 10 # First row after which the Excel spreadsheet will be updated\ncol_checklist_name = \"A\"\nrow_checklist_name = \"6\"\nguid_column_index = \"L\"\ncomment_column_index = \"G\"\nsample_cell_index = 'A2'\ncol_area = \"A\"\ncol_subarea = \"B\"\ncol_check = \"C\"\ncol_desc = \"D\"\ncol_sev = \"E\"\ncol_status = \"F\"\ncol_comment = \"G\"\ncol_link = \"H\"\ncol_training = \"I\"\ncol_arg_success = \"J\"\ncol_arg_failure = \"K\"\ncol_guid = \"L\"\ninfo_link_text = 'More info'\ntraining_link_text = 'Training'\nworksheet_values_name = 'Values'\nvalues_row1 = 2\ncol_values_severity = \"A\"\ncol_values_status = \"B\"\ncol_values_area = \"C\"\ncol_values_description = \"H\"\n\n\n# Download checklist\nif checklist_file:\n if args.verbose:\n print(\"DEBUG: Opening checklist file\", checklist_file)\n # Get JSON\n try:\n with open(checklist_file) as f:\n checklist_data = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when processing JSON file\", 
checklist_file, \"-\", str(e))\n sys.exit(1)\nelse:\n if technology:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\n else:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\n if args.verbose:\n print(\"DEBUG: Downloading checklist file from\", checklist_url)\n response = requests.get(checklist_url)\n # If download was successful\n if response.status_code == 200:\n if args.verbose:\n print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_data = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n\n# Load workbook\ntry:\n if args.appmode:\n print(\"DEBUG: opening Excel workbook in app mode 'App().books.open'...\")\n app = xw.App()\n wb = app.books.open(excel_file)\n else:\n print(\"DEBUG: opening Excel workbook with xb.Book function...\")\n wb = xw.Book(excel_file) # This line is occassionally giving the error \"(-2147352570, 'Unknown name.', None, None)\"\n if args.verbose:\n print(\"DEBUG: workbook\", excel_file, \"opened successfully\")\nexcept Exception as e:\n print(\"ERROR: Error when opening Excel file\", excel_file, \"-\", str(e))\n sys.exit(1)\n\n# Get worksheet\ntry:\n ws = wb.sheets[worksheet_checklist_name]\n if args.verbose:\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\nexcept Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\n sys.exit(1)\n\n# Set checklist name\ntry:\n ws.range(col_checklist_name + row_checklist_name).value = checklist_data[\"metadata\"][\"name\"]\n if args.verbose:\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\nexcept Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\n sys.exit(1)\n\n# Get default status from the JSON, default to \"Not verified\"\ntry:\n status_list = checklist_data.get(\"status\")\n default_status = status_list[0].get(\"name\")\n if args.verbose:\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\nexcept:\n default_status = \"Not verified\"\n if args.verbose:\n print (\"DEBUG: Using default status 'Not verified'\")\n pass\n\n# For each checklist item, add a row to spreadsheet\nrow_counter = row1\nfor item in checklist_data.get(\"items\"):\n # Read variables from JSON\n guid = item.get(\"guid\")\n category = item.get(\"category\")\n subcategory = item.get(\"subcategory\")\n text = item.get(\"text\")\n description = item.get(\"description\")\n severity = item.get(\"severity\")\n link = item.get(\"link\")\n training = item.get(\"training\")\n status = default_status\n graph_query_success = item.get(\"graph_success\")\n graph_query_failure = item.get(\"graph_failure\")\n # Update Excel\n ws.range(col_area + str(row_counter)).value = category\n ws.range(col_subarea + str(row_counter)).value = subcategory\n ws.range(col_check + str(row_counter)).value = text\n ws.range(col_desc + str(row_counter)).value = description\n ws.range(col_sev + str(row_counter)).value = severity\n ws.range(col_status + str(row_counter)).value = status\n # ws.range(col_link + str(row_counter)).value = link\n if link != None:\n link_elements = link.split('#')\n link_address = 
link_elements[0]\n if len(link_elements) > 1:\n link_subaddress = link_elements[1]\n else:\n link_subaddress = \"\"\n ws.api.Hyperlinks.Add (Anchor=ws.range(col_link + str(row_counter)).api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\n # ws.range(col_training + str(row_counter)).value = training\n if training != None:\n training_elements = training.split('#')\n training_address = training_elements[0]\n if len(training_elements) > 1:\n training_subaddress = training_elements[1]\n else:\n training_subaddress = \"\"\n ws.api.Hyperlinks.Add (Anchor=ws.range(col_training + str(row_counter)).api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\n # GUID and ARG queries\n ws.range(col_arg_success + str(row_counter)).value = graph_query_success\n ws.range(col_arg_failure + str(row_counter)).value = graph_query_failure\n ws.range(col_guid + str(row_counter)).value = guid\n # Next row\n row_counter += 1\n\n# Display summary\nif args.verbose:\n print(\"DEBUG:\", str(row_counter - row1), \"checks addedd to Excel spreadsheet\")\n\n# Get worksheet\ntry:\n wsv = wb.sheets[worksheet_values_name]\n if args.verbose:\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\nexcept Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\n sys.exit(1)\n\n\n# Update categories\nrow_counter = values_row1\nfor item in checklist_data.get(\"categories\"):\n area = item.get(\"name\")\n wsv.range(col_values_area + str(row_counter)).value = area\n row_counter += 1\n\n# Display summary\nif args.verbose:\n print(\"DEBUG:\", str(row_counter - values_row1), \"categories addedd to Excel spreadsheet\")\n\n# Update status\nrow_counter = values_row1\nfor item in checklist_data.get(\"status\"):\n status = item.get(\"name\")\n description = item.get(\"description\")\n wsv.range(col_values_status + str(row_counter)).value = status\n wsv.range(col_values_description + str(row_counter)).value = description\n row_counter += 1\n\n# Display summary\nif args.verbose:\n print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\n\n# Update severities\nrow_counter = values_row1\nfor item in checklist_data.get(\"severities\"):\n severity = item.get(\"name\")\n wsv.range(col_values_severity + str(row_counter)).value = severity\n row_counter += 1\n\n# Display summary\nif args.verbose:\n print(\"DEBUG:\", str(row_counter - values_row1), \"severities addedd to Excel spreadsheet\")\n\n# Close book\nif args.verbose:\n print(\"DEBUG: saving workbook\", excel_file)\ntry:\n wb.save()\n if args.appmode:\n app.quit() # If we were in app mode, close Excel\nexcept Exception as e:\n print(\"ERROR: Error when saving Excel file\", excel_file, \"-\", str(e))\n sys.exit(1)\n\nscripts/update_excel_openpyxl.py METASEP\n######################################################################\n#\n# This script reads the checklist items from the latest checklist file\n# in Github (or from a local file) and populates an Excel spreadsheet\n# with the contents.\n# \n# Last updated: March 2022\n#\n######################################################################\n\nimport json\nimport argparse\nimport sys\nimport os\nimport requests\nimport glob\nfrom openpyxl import load_workbook\nfrom openpyxl.worksheet.datavalidation import DataValidation\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with 
JSON-formatted Azure Resource Graph results')\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\nparser.add_argument('--technology', dest='technology', action='store',\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\nparser.add_argument('--excel-file', dest='excel_file', action='store',\n help='You need to supply an Excel file where the checklist will be written')\nparser.add_argument('--output-excel-file', dest='output_excel_file', action='store',\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\nparser.add_argument('--output-path', dest='output_path', action='store',\n help='If using --output-name-is-input-name, folder where to store the results')\nparser.add_argument('--output-name-is-input-name', dest='output_name_is_input_name', action='store_true',\n default=False,\n help='Save the output in a file with the same filename as the JSON input, but with xlsx extension')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\nchecklist_file = args.checklist_file\nexcel_file = args.excel_file\ntechnology = args.technology\n\n# Constants\nworksheet_checklist_name = 'Checklist'\nrow1 = 8 # First row after which the Excel spreadsheet will be updated\ncol_checklist_name = \"A\"\nrow_checklist_name = \"4\"\nguid_column_index = \"L\"\ncomment_column_index = \"G\"\nsample_cell_index = 'A4'\ncol_area = \"A\"\ncol_subarea = \"B\"\ncol_check = \"C\"\ncol_desc = \"D\"\ncol_sev = \"E\"\ncol_status = \"F\"\ncol_comment = \"G\"\ncol_link = \"H\"\ncol_training = \"I\"\ncol_arg_success = \"J\"\ncol_arg_failure = \"K\"\ncol_guid = \"L\"\ninfo_link_text = 'More info'\ntraining_link_text = 'Training'\nworksheet_values_name = 'Values'\nvalues_row1 = 2\ncol_values_severity = \"A\"\ncol_values_status = \"B\"\ncol_values_area = \"C\"\ncol_values_description = \"H\"\n\n# Main function\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\n # Load workbook\n try:\n wb = load_workbook(filename = input_excel_file)\n if args.verbose:\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\n except Exception as e:\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\n sys.exit(1)\n\n # Get worksheet\n try:\n ws = wb[worksheet_checklist_name]\n if args.verbose:\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\n except Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\n sys.exit(1)\n\n # Set checklist name\n try:\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\n if args.verbose:\n print(\"DEBUG: starting filling 
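# --- Summary note (added for clarity, not part of the original script) ----------
# Column layout written by update_excel_file(), as defined by the constants above
# (first data row is 8, the checklist name goes into cell A4 of the 'Checklist' sheet):
#   A category   B subcategory   C check text   D description   E severity    F status
#   G comment    H link          I training     J ARG success   K ARG failure L GUID
# The 'Values' sheet receives severities (col A), statuses (col B, descriptions in col H)
# and categories (col C), starting at row 2.
# ---------------------------------------------------------------------------------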
the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\n    except Exception as e:\n        print(\"ERROR: Error when setting the checklist name in worksheet\", worksheet_checklist_name, \"-\", str(e))\n        sys.exit(1)\n\n    # Get default status from the JSON, default to \"Not verified\"\n    try:\n        status_list = checklist_data.get(\"status\")\n        default_status = status_list[0].get(\"name\")\n        if args.verbose:\n            print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\n    except:\n        default_status = \"Not verified\"\n        if args.verbose:\n            print (\"DEBUG: Using default status 'Not verified'\")\n        pass\n\n    # For each checklist item, add a row to spreadsheet\n    row_counter = row1\n    for item in checklist_data.get(\"items\"):\n        # Read variables from JSON\n        guid = item.get(\"guid\")\n        category = item.get(\"category\")\n        subcategory = item.get(\"subcategory\")\n        text = item.get(\"text\")\n        description = item.get(\"description\")\n        severity = item.get(\"severity\")\n        link = item.get(\"link\")\n        training = item.get(\"training\")\n        status = default_status\n        graph_query_success = item.get(\"graph_success\")\n        graph_query_failure = item.get(\"graph_failure\")\n        # Update Excel\n        ws[col_area + str(row_counter)].value = category\n        ws[col_subarea + str(row_counter)].value = subcategory\n        ws[col_check + str(row_counter)].value = text\n        ws[col_desc + str(row_counter)].value = description\n        ws[col_sev + str(row_counter)].value = severity\n        ws[col_status + str(row_counter)].value = status\n        ws[col_link + str(row_counter)].value = link\n        # if link != None:\n        #     link_elements = link.split('#')\n        #     link_address = link_elements[0]\n        #     if len(link_elements) > 1:\n        #         link_subaddress = link_elements[1]\n        #     else:\n        #         link_subaddress = \"\"\n        #     ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\n        ws[col_training + str(row_counter)].value = training\n        # if training != None:\n        #     training_elements = training.split('#')\n        #     training_address = training_elements[0]\n        #     if len(training_elements) > 1:\n        #         training_subaddress = training_elements[1]\n        #     else:\n        #         training_subaddress = \"\"\n        #     ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\n        # GUID and ARG queries\n        ws[col_arg_success + str(row_counter)].value = graph_query_success\n        ws[col_arg_failure + str(row_counter)].value = graph_query_failure\n        ws[col_guid + str(row_counter)].value = guid\n        # Next row\n        row_counter += 1\n\n    # Display summary (number_of_checks is also needed later for the data validation range, so compute it outside of the verbose check)\n    number_of_checks = row_counter - row1\n    if args.verbose:\n        print(\"DEBUG:\", str(number_of_checks), \"checks added to Excel spreadsheet\")\n\n    # Get worksheet\n    try:\n        wsv = wb[worksheet_values_name]\n        if args.verbose:\n            print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\n    except Exception as e:\n        print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\n        sys.exit(1)\n\n    # Update categories\n    row_counter = values_row1\n    for item in checklist_data.get(\"categories\"):\n        area = item.get(\"name\")\n        wsv[col_values_area + str(row_counter)].value = area\n        row_counter += 1\n\n    # Display summary\n    if args.verbose:\n        print(\"DEBUG:\", str(row_counter - values_row1), \"categories added to Excel spreadsheet\")\n\n    # Update status\n    row_counter = values_row1\n    for item in checklist_data.get(\"status\"):\n        status = item.get(\"name\")\n        description = item.get(\"description\")\n        wsv[col_values_status + str(row_counter)].value = status\n        wsv[col_values_description + str(row_counter)].value = description\n        row_counter += 1\n\n    # Display summary\n    if args.verbose:\n        print(\"DEBUG:\", str(row_counter - values_row1), \"statuses added to Excel spreadsheet\")\n\n    # Update severities\n    row_counter = values_row1\n    for item in checklist_data.get(\"severities\"):\n        severity = item.get(\"name\")\n        wsv[col_values_severity + str(row_counter)].value = severity\n        row_counter += 1\n\n    # Display summary\n    if args.verbose:\n        print(\"DEBUG:\", str(row_counter - values_row1), \"severities added to Excel spreadsheet\")\n\n    # Data validation\n    # dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)\n    dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True)\n    rangevar = col_status + str(row1) +':' + col_status + str(row1 + number_of_checks)\n    if args.verbose:\n        print(\"DEBUG: adding data validation to range\", rangevar)\n    dv.add(rangevar)\n    ws.add_data_validation(dv)\n\n    # Close book\n    if args.verbose:\n        print(\"DEBUG: saving workbook\", output_excel_file)\n    try:\n        wb.save(output_excel_file)\n    except Exception as e:\n        print(\"ERROR: Error when saving Excel file to\", output_excel_file, \"-\", str(e))\n        sys.exit(1)\n\n########\n# Main #\n########\n\n# Download checklist\nif checklist_file:\n    checklist_file_list = checklist_file.split(\" \")\n    # If --only-english parameter was supplied, take only the English version and remove duplicates\n    if args.only_english:\n        checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\n        checklist_file_list = list(set(checklist_file_list))\n        if args.verbose:\n            print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\n    # If --find-all parameter was supplied, find all the languages for the checklist\n    if args.find_all:\n        new_file_list = []\n        for checklist_file in checklist_file_list:\n            filedir = os.path.dirname(checklist_file)\n            filebase = os.path.basename(checklist_file)\n            filebase_noext = filebase[:-8]  # Remove '.en.json'\n            file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\n            for checklist_match in file_match_list:\n                # new_file_list.append(os.path.join(filedir, checklist_match))\n                new_file_list.append(checklist_match)\n        checklist_file_list = list(set(new_file_list))\n        if args.verbose:\n            print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\n    # Go over the list\n    for checklist_file in checklist_file_list:\n        if args.verbose:\n            print(\"DEBUG: Opening checklist file\", checklist_file)\n        # Get JSON\n        try:\n            with open(checklist_file) as f:\n                checklist_data = json.load(f)\n        except Exception as e:\n            print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\n            sys.exit(0)\n        # Set input and output files\n        input_excel_file = excel_file\n        if args.output_excel_file:\n            output_excel_file = args.output_excel_file\n        elif args.output_name_is_input_name:\n            if args.output_path:\n                # Get filename without path and extension\n                output_excel_file = os.path.splitext(os.path.basename(checklist_file))[0] + '.xlsx'\n                output_excel_file = os.path.join(args.output_path, output_excel_file)\n            else:\n                # Just change the extension\n                output_excel_file = os.path.splitext(checklist_file)[0] + '.xlsx'\n        else:\n            # No output file specified, update the input Excel file in-place\n            output_excel_file = excel_file\n        # Update spreadsheet\n        update_excel_file(input_excel_file, output_excel_file, checklist_data)\nelse:\n    if technology:\n        checklist_url = 
\"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\n else:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\n if args.verbose:\n print(\"DEBUG: Downloading checklist file from\", checklist_url)\n response = requests.get(checklist_url)\n # If download was successful\n if response.status_code == 200:\n if args.verbose:\n print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_data = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n # Upload spreadsheet\n if args.output_excel_file:\n output_excel_file = args.output_excel_file\n else:\n output_excel_file = excel_file\n update_excel_file(excel_file, output_excel_file, checklist_data)\n\n\nscripts/translate.py METASEP\nimport requests\nimport os\nimport argparse\nimport sys\nimport json\nimport uuid\n\n# Variables\ntranslate_keys = ('description', 'name', 'category', 'subcategory', 'text', 'severity')\ntranslate_languages = ['es', 'ja', 'pt', 'ko']\n\n# Get environment variables\ntranslator_endpoint = os.environ[\"AZURE_TRANSLATOR_ENDPOINT\"]\ntranslator_region = os.environ[\"AZURE_TRANSLATOR_REGION\"]\ntranslator_key = os.environ[\"AZURE_TRANSLATOR_SUBSCRIPTION_KEY\"]\ntranslator_url = translator_endpoint + 'translate'\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Translate a JSON file')\nparser.add_argument('--input-file-name', dest='file_name_in', action='store',\n help='you need to supply file name where your JSON to be translated is located')\nparser.add_argument('--output-file-name', dest='file_name_out', action='store',\n help='you need to supply file name where the translated JSON will be saved')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\n# Check we have all information\nif translator_endpoint and translator_region and translator_key:\n if args.verbose:\n print('DEBUG: environment variables retrieved successfully: {0}, {1}, {2}'.format(translator_endpoint, translator_region, translator_key))\nelse:\n print('ERROR: couldnt retrieve environment variables for translation')\n sys.exit(1)\n\n# Get JSON\ntry:\n with open(args.file_name_in) as f:\n checklist = json.load(f)\nexcept Exception as e:\n print(\"ERROR: Error when processing JSON file\", args.file_name_in, \"-\", str(e))\n sys.exit(1)\n\n# Function to translate a single line of text to a single language\ndef translate_text(text_to_translate, languages):\n if args.verbose:\n print('DEBUG: translating text \"{0}\" on {1}...'.format(text_to_translate, translator_url))\n # If a single languages specified, convert to array\n if not type(languages) == list:\n languages = [languages]\n # Azure Translator parameters\n translator_params = {\n 'api-version': '3.0',\n 'from': 'en',\n 'to': languages\n }\n translator_headers = {\n 'Ocp-Apim-Subscription-Key': translator_key,\n 'Ocp-Apim-Subscription-Region': translator_region,\n 'Content-type': 'application/json',\n 'Accept': 'application/json',\n 'X-ClientTraceId': str(uuid.uuid4())\n }\n translator_body = [{\n 'text': text_to_translate\n }]\n if args.verbose:\n print (\"DEBUG: sending body\", str(translator_body))\n print (\"DEBUG: sending HTTP headers\", str(translator_headers))\n print (\"DEBUG: sending 
parameters\", str(translator_params))\n try:\n request = requests.post(translator_url, params=translator_params, headers=translator_headers, json=translator_body)\n response = request.json()\n if args.verbose:\n print(\"DEBUG: translator response:\")\n print(json.dumps(response, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': ')))\n return str(response[0]['translations'][0]['text'])\n except Exception as e:\n print(\"ERROR: Error in translation:\", str(e))\n\n# Go over all keys and translate them if required\ndef translate_object(checklist_object, language):\n translated_object = checklist_object.copy()\n for (k, v) in translated_object.items():\n if isinstance(v, list):\n translated_items = []\n for list_item in v:\n translated_items.append(translate_object(list_item, language))\n translated_object[k] = translated_items\n else:\n if k in translate_keys:\n # print(\"Found key\", k, \"and scalar value\", v)\n translated_object[k] = translate_text(v, language)\n return translated_object\n\n################\n# Main #\n################\n\nif args.verbose:\n print(\"DEBUG: Starting translations for languages\", str(translate_languages))\n\nfor using_language in translate_languages:\n print(\"INFO: Starting translation to\", using_language)\n translated_checklist = translate_object(checklist, using_language)\n # If no output file was specified, use the input file, and append the language as extension before .json\n if not args.file_name_out:\n file_name_in_base = os.path.basename(args.file_name_in)\n file_name_in_dir = os.path.dirname(args.file_name_in)\n file_name_in_noext = file_name_in_base.split('.')[0]\n file_name_out = file_name_in_noext + '.' + using_language + '.json'\n file_name_out = os.path.join(file_name_in_dir, file_name_out)\n print(\"INFO: saving output file to\", file_name_out)\n translated_checklist_string = json.dumps(translated_checklist, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\n with open(file_name_out, 'w', encoding='utf-8') as f:\n f.write(translated_checklist_string)\n f.close()\n # print(json.dumps(translated_checklist, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': ')))\n\nscripts/sort_checklist.py METASEP\n#################################################################################\n#\n# This script sorts a specific checklist and saves it.\n# \n# Last updated: January 2023\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nimport requests\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\nparser.add_argument('--input-file', dest='input_file', action='store',\n help='You need to supply the name of the JSON file with the checklist to be filtered')\nparser.add_argument('--output-file', dest='output_file', action='store',\n help='You can optionally supply the name of a new JSON file that will be used to save the output. 
Otherwise the sorted checklist will replace the unused one')\nparser.add_argument('--dry-run', dest='dry_run', action='store_true',\n default=False,\n help='do not save anything, only output to console (default: False)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\nif not args.input_file:\n print(\"ERROR: no input file specified, not doing anything\")\n\n# Load the checklist\ntry:\n with open(args.input_file) as f:\n checklist = json.load(f)\nexcept Exception as e:\n print(\"ERROR: Error when processing JSON file, nothing changed\", args.input_file, \"-\", str(e))\n\n# Sort the items per category and subcategory\nitems = checklist['items']\nitems = sorted(items, key=lambda k: (k['category'],k[\"subcategory\"]))\nchecklist['items'] = items\n\n# If dry-run, show on screen\nif args.dry_run:\n print(json.dumps(checklist, indent=4))\n\n# Saving output file if specified in the argument\nif not args.dry_run:\n if args.output_file:\n output_file = args.output_file\n else:\n output_file = args.input_file\n if args.verbose:\n print(\"DEBUG: saving output file to\", output_file)\n checklist_string = json.dumps(checklist, indent=4)\n with open(output_file, 'w', encoding='utf-8') as f:\n f.write(checklist_string)\n f.close()\n\nscripts/compile_checklist.py METASEP\n#################################################################################\n#\n# This script attempts to build a unified checklist out of all the different checklists\n# stored in this repo, and optionally filter it per design area.\n# \n# Last updated: June 2022\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nimport requests\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\nparser.add_argument('--output-file', dest='output_file', action='store',\n help='You can optionally supply the name of the JSON file that will be created. 
Otherwise no output will be generated')\nparser.add_argument('--category', dest='category_filter', action='store',\n help='You can optionally provide a category name as a filter')\nparser.add_argument('--checklist-name', dest='new_checklist_name', action='store',\n default='Combined checklist',\n help='You can optionally provide a category name as a filter')\nparser.add_argument('--print-categories', dest='print_categories', action='store_true',\n default=False,\n help='print the categories of the combined checklist (default: False)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\nif args.category_filter:\n category_filter = args.category_filter.lower()\n\n# Variables\nrepo_contents_url = 'https://api.github.com/repos/azure/review-checklists/contents/checklists'\n\n# Get existing checklists in the repo\nresponse = requests.get(repo_contents_url)\n# If download was successful\nif response.status_code == 200:\n if args.verbose:\n print (\"DEBUG: Github contents downloaded successfully from {0}\".format(repo_contents_url))\n try:\n content_data = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n\n# Get the list of checklist files\nchecklist_urls = []\nif content_data:\n for github_object in content_data:\n if github_object['name'][-7:] == 'en.json':\n checklist_urls.append(github_object['download_url'])\nelse:\n print(\"Error deserializing JSON content from GitHub repository contents: {0}\".format(str(e)))\n sys.exit(1)\nif args.verbose:\n print(\"DEBUG: {0} checklists found\".format(str(len(checklist_urls))))\n\n# Load all of the items in memory\nnew_checklist = { \n 'items': [],\n 'status': [\n {'name': 'Not verified', 'description': 'This check has not been looked at yet'},\n {'name': 'Open', 'description': 'There is an action item associated to this check'},\n {'name': 'Fulfilled', 'description': 'This check has been verified, and there are no further action items associated to it'},\n {'name': 'Not required', 'description': 'Recommendation understood, but not needed by current requirements'},\n {'name': 'N/A', 'description': 'Not applicable for current design'}\n ],\n 'severities': [ {'name': 'High'}, {'name': 'Medium'}, {'name': 'Low'} ],\n 'categories': [],\n 'metadata': { 'name': args.new_checklist_name }\n }\nfor checklist_url in checklist_urls:\n if args.verbose:\n print(\"DEBUG: Downloading checklist file from\", checklist_url)\n response = requests.get(checklist_url)\n if response.status_code == 200:\n if args.verbose:\n print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_data = json.loads(response.text)\n checklist_name = checklist_data['metadata']['name']\n for item in checklist_data['items']:\n if checklist_name:\n item['checklist'] = checklist_name\n item_category = str(item['category']).lower()\n if not args.category_filter or item_category.__contains__(category_filter):\n new_checklist['items'].append(item)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\nif args.verbose:\n print(\"DEBUG: Resulting combined checklist has {0} items\".format(str(len(new_checklist['items']))))\n\n# Add the categories to the new checklist\ncategories = []\nfor item in new_checklist['items']:\n category_name=item['checklist'] + '/' + item['category']\n if not 
category_name in categories:\n categories.append(category_name)\nif args.verbose:\n print(\"DEBUG: {0} categories found\".format(str(len(categories))))\nfor category in categories:\n new_checklist['categories'].append({'name': category})\n if args.print_categories:\n print(category)\n\n# Saving output file if specified in the argument\nif args.output_file:\n if args.verbose:\n print(\"DEBUG: saving output file to\", args.output_file)\n new_checklist_string = json.dumps(new_checklist)\n with open(args.output_file, 'w', encoding='utf-8') as f:\n f.write(new_checklist_string)\n f.close()\n\nscripts/checklist_graph_update.py METASEP\n#################################################################################\n#\n# This is a study on two libraries to update Excel files: openpyxl and xlwings\n# This exercise has shown that openpyxl breaks the xlsx files in this repo (maybe\n# because of the macros, or the formulae), while xlwings works fine.\n#\n# This script reads a previously generated JSON file with the results of Azure\n# Resource Graph queries, and stores them in the 'Comments' column of a\n# spreadsheet. Both the JSON file and the spreadsheet file are supplied as\n# parameters.\n# \n# Last updated: March 2022\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nfrom pandas import DataFrame\nfrom openpyxl import load_workbook\nimport xlwings as xw\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\nparser.add_argument('--graph-file', dest='graph_file', action='store',\n help='You need to supply a JSON file containing the results of Azure Resource Graph Queries')\nparser.add_argument('--excel-file', dest='excel_file', action='store',\n help='You need to supply an Excel file where the query results will be stored')\nparser.add_argument('--mode', dest='mode', action='store', default=\"openpyxl\",\n help='It can be either xlwings or openpyxl (default is openpyxl)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\ngraph_file = args.graph_file\nexcel_file = args.excel_file\nmode = args.mode\n\n# Constants\nguid_column_index = \"K\"\ncomment_column_index = \"G\"\nsample_cell_index = 'A4'\n\n# Get JSON\ntry:\n with open(graph_file) as f:\n graph_data = json.load(f)\nexcept Exception as e:\n print(\"ERROR: Error when processing JSON file\", graph_file, \"-\", str(e))\n sys.exit(1)\n\n# Load workbook\ntry:\n if mode == 'openpyxl':\n if args.verbose:\n print(\"DEBUG: working with openpyxl library\")\n wb = load_workbook(filename = excel_file)\n ws = wb['Checklist']\n elif mode == 'xlwings':\n if args.verbose:\n print(\"DEBUG: working with xlwings library\")\n wb = xw.Book(excel_file)\n ws = wb.sheets['Checklist']\n else:\n print(\"ERROR: mode {0} not recognized\".format(mode))\nexcept Exception as e:\n print(\"ERROR: Error when opening Excel file\", excel_file, \"-\", str(e))\n sys.exit(1)\n\n# Print specific cell\nif args.verbose:\n print(\"DEBUG: looking at spreadsheet for\", ws[sample_cell_index].value)\n\n# Get GUID column into a list\nif mode == 'openpyxl':\n guid_col = ws[guid_column_index]\n guid_col_values = [x.value for x in guid_col]\n if args.verbose:\n print(\"DEBUG: GUID column retrieved with\", str(len(guid_col_values)), \"values\")\nelif mode == 'xlwings':\n guid_col_values = 
ws.range(guid_column_index + \":\" + guid_column_index).value\n if args.verbose:\n print(\"DEBUG: GUID column retrieved with\", str(len(guid_col_values)), \"values\")\nelse:\n print(\"ERROR: mode {0} not recognized\".format(mode))\n sys.exit(1)\n\n# Go over all checks in the JSON file\nfor check in graph_data['checks']:\n guid = check['guid']\n arm_id = check['id']\n compliant = check['compliant']\n if (compliant == \"false\"):\n comment = \"Non-compliant: {0}\\n\".format(arm_id)\n elif (compliant == \"true\"):\n comment = \"Compliant: {0}\\n\".format(arm_id)\n else:\n print(\"ERROR: compliant status {0} not recognized\".format(compliant))\n # Find the guid in the list\n if guid in guid_col_values:\n row = guid_col_values.index(guid)\n cell_index = comment_column_index + str(row)\n print(\"DEBUG: updating cell\", cell_index)\n if mode == 'openpyxl':\n ws[cell_index] = comment\n elif mode == 'xlwings':\n ws.range(cell_index).value = comment\n else:\n print(\"ERROR: could not find GUID {0} in the Excel list\".format(guid))\n\n# Saving file\nif mode == 'openpyxl':\n print(\"DEBUG: saving workbook\", excel_file)\n try:\n wb.save(excel_file)\n except Exception as e:\n print(\"ERROR: Error when saving Excel file\", excel_file, \"-\", str(e))\n sys.exit(1)\nelif mode == 'xlwings':\n print(\"DEBUG: saving workbook\", excel_file)\n try:\n wb.save()\n except Exception as e:\n print(\"ERROR: Error when saving Excel file\", excel_file, \"-\", str(e))\n sys.exit(1)\nelse:\n print(\"ERROR: mode {0} not recognized\".format(mode))\n\nscripts/workbook_create.py METASEP\n"},"file_context":{"kind":"list like","value":[{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n graph_query = item.get(\"graph\")\r\n if graph_query:\r\n query_id += 1\r\n # Create new text\r\n new_text = block_text.copy()\r\n new_text['name'] = 'querytext' + str(query_id)\r\n new_text['content']['json'] = text\r\n if link:\r\n new_text['content']['json'] += \". Check [this link](\" + link + \") for further information.\"\r\n if training:\r\n new_text['content']['json'] += \". 
[This training](\" + training + \") can help to educate yourself on this.\"\r\n # Create new query\r\n new_query = block_query.copy()\r\n new_query['name'] = 'query' + str(query_id)\r\n new_query['content']['query'] = graph_query\r\n new_query['content']['size'] = query_size\r\n # Add text and query to the workbook\r\n category_id = category_dict[category]\r\n if args.verbose:\r\n print (\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\".format(str(category_id), workbook['items'][category_id]['name']))\r\n new_new_text=json.loads(json.dumps(new_text.copy()))\r\n new_new_query=json.loads(json.dumps(new_query.copy()))\r\n workbook['items'][category_id]['content']['items'].append(new_new_text)\r\n workbook['items'][category_id]['content']['items'].append(new_new_query)\r\n\r\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\r\n if query_id > 0:\r\n workbook_string = json.dumps(workbook, indent=4)\r\n if output_file:\r\n with open(output_file, 'w', encoding='utf-8') as f:\r\n f.write(workbook_string)\r\n f.close()\r\n else:\r\n print(workbook_string)\r\n else:\r\n print(\"INFO: sorry, the analyzed checklist did not contain any graph query\")\r\n\r\ndef get_output_file(checklist_file_or_url, is_file=True):\r\n if is_file:\r\n output_file = os.path.basename(checklist_file_or_url)\r\n else:\r\n output_file = checklist_file_or_url.split('/')[-1]\r\n if args.output_file:\r\n return args.output_file\r\n elif args.output_path:\r\n # Get filename without path and extension\r\n output_file = os.path.join(args.output_path, output_file)\r\n return os.path.splitext(output_file)[0] + '_workbook.json'\r\n else:\r\n output_file = None\r\n\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# First thing of all, load the building blocks\r","type":"infile"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n graph_query = item.get(\"graph\")\r\n if graph_query:\r\n query_id += 1\r\n # Create new text\r\n new_text = block_text.copy()\r\n new_text['name'] = 'querytext' + str(query_id)\r\n new_text['content']['json'] = text\r\n if link:\r\n new_text['content']['json'] += \". Check [this link](\" + link + \") for further information.\"\r\n if training:\r\n new_text['content']['json'] += \". 
[This training](\" + training + \") can help to educate yourself on this.\"\r\n # Create new query\r\n new_query = block_query.copy()\r\n new_query['name'] = 'query' + str(query_id)\r\n new_query['content']['query'] = graph_query\r\n new_query['content']['size'] = query_size\r\n # Add text and query to the workbook\r\n category_id = category_dict[category]\r\n if args.verbose:\r\n print (\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\".format(str(category_id), workbook['items'][category_id]['name']))\r\n new_new_text=json.loads(json.dumps(new_text.copy()))\r\n new_new_query=json.loads(json.dumps(new_query.copy()))\r\n workbook['items'][category_id]['content']['items'].append(new_new_text)\r\n workbook['items'][category_id]['content']['items'].append(new_new_query)\r\n\r\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\r\n if query_id > 0:\r\n workbook_string = json.dumps(workbook, indent=4)\r\n if output_file:\r\n with open(output_file, 'w', encoding='utf-8') as f:\r\n f.write(workbook_string)\r\n f.close()\r\n else:\r\n print(workbook_string)\r\n else:\r\n print(\"INFO: sorry, the analyzed checklist did not contain any graph query\")\r\n\r\ndef get_output_file(checklist_file_or_url, is_file=True):\r\n if is_file:\r\n output_file = os.path.basename(checklist_file_or_url)\r\n else:\r\n output_file = checklist_file_or_url.split('/')[-1]\r\n if args.output_file:\r\n return args.output_file\r\n elif args.output_path:\r\n # Get filename without path and extension\r\n output_file = os.path.join(args.output_path, output_file)\r\n return os.path.splitext(output_file)[0] + '_workbook.json'\r\n else:\r\n output_file = None\r\n\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# First thing of all, load the building blocks\r\nload_building_blocks()\r\nif args.verbose:\r\n print (\"DEBUG: building blocks variables intialized:\")\r\n print (\"DEBUG: - Workbook: {0}\".format(str(block_workbook)))\r\n print (\"DEBUG: - Link: {0}\".format(str(block_link)))\r\n print (\"DEBUG: - Query: {0}\".format(str(block_query)))\r\n\r\n# Download checklist or process from local file\r\nif checklist_file:\r\n checklist_file_list = checklist_file.split(\" \")\r\n # If --only-english parameter was supplied, take only the English version and remove duplicates\r\n if args.only_english:\r\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\r\n checklist_file_list = list(set(checklist_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # If --find-all paramater was supplied, find all the languages for the checklist\r\n if args.find_all:\r\n new_file_list = []\r\n for checklist_file in checklist_file_list:\r\n filedir = os.path.dirname(checklist_file)\r\n filebase = os.path.basename(checklist_file)\r\n filebase_noext = filebase[:-8] # Remove '.en.json'\r\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\r\n for checklist_match in file_match_list:\r\n # new_file_list.append(os.path.join(filedir, checklist_match))\r\n new_file_list.append(checklist_match)\r\n checklist_file_list = list(set(new_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # Go over the list(s)\r\n for checklist_file in checklist_file_list:\r\n if args.verbose:\r\n print(\"DEBUG: Opening checklist file\", checklist_file)\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = 
json.load(f)\r\n        except Exception as e:\r\n            print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n            sys.exit(0)\r\n        # Set output files\r\n        output_file = get_output_file(checklist_file, is_file=True)\r\n        # Generate workbook\r\n        generate_workbook(output_file, checklist_data)\r\nelse:\r\n    # If no input files specified, fetch the latest from Github...\r\n    if technology:\r\n        checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\r\n    else:\r\n        checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\r\n    if args.verbose:\r\n        print(\"DEBUG: Downloading checklist file from\", checklist_url)\r\n    response = requests.get(checklist_url)\r\n    # If download was successful\r\n    if response.status_code == 200:\r\n        if args.verbose:\r\n            print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\r\n        try:\r\n            # Deserialize JSON to object variable\r\n            checklist_data = json.loads(response.text)\r\n        except Exception as e:\r\n            print(\"Error deserializing JSON content: {0}\".format(str(e)))\r\n            sys.exit(1)\r\n        # Set output files\r","type":"infile"}
for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n graph_query = item.get(\"graph\")\r\n if graph_query:\r\n query_id += 1\r\n # Create new text\r\n new_text = block_text.copy()\r\n new_text['name'] = 'querytext' + str(query_id)\r\n new_text['content']['json'] = text\r\n if link:\r\n new_text['content']['json'] += \". Check [this link](\" + link + \") for further information.\"\r\n if training:\r\n new_text['content']['json'] += \". [This training](\" + training + \") can help to educate yourself on this.\"\r\n # Create new query\r\n new_query = block_query.copy()\r\n new_query['name'] = 'query' + str(query_id)\r\n new_query['content']['query'] = graph_query\r\n new_query['content']['size'] = query_size\r\n # Add text and query to the workbook\r\n category_id = category_dict[category]\r\n if args.verbose:\r\n print (\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\".format(str(category_id), workbook['items'][category_id]['name']))\r\n new_new_text=json.loads(json.dumps(new_text.copy()))\r\n new_new_query=json.loads(json.dumps(new_query.copy()))\r\n workbook['items'][category_id]['content']['items'].append(new_new_text)\r\n workbook['items'][category_id]['content']['items'].append(new_new_query)\r\n\r\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\r\n if query_id > 0:\r\n workbook_string = json.dumps(workbook, indent=4)\r\n if output_file:\r\n with open(output_file, 'w', encoding='utf-8') as f:\r\n f.write(workbook_string)\r\n f.close()\r\n else:\r\n print(workbook_string)\r\n else:\r\n print(\"INFO: sorry, the analyzed checklist did not contain any graph query\")\r\n\r\ndef get_output_file(checklist_file_or_url, is_file=True):\r\n if is_file:\r\n output_file = os.path.basename(checklist_file_or_url)\r\n else:\r\n output_file = checklist_file_or_url.split('/')[-1]\r\n if args.output_file:\r\n return args.output_file\r\n elif args.output_path:\r\n # Get filename without path and extension\r\n output_file = os.path.join(args.output_path, output_file)\r\n return os.path.splitext(output_file)[0] + '_workbook.json'\r\n else:\r\n output_file = None\r\n\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# First thing of all, load the building blocks\r\nload_building_blocks()\r\nif args.verbose:\r\n print (\"DEBUG: building blocks variables intialized:\")\r\n print (\"DEBUG: - Workbook: {0}\".format(str(block_workbook)))\r\n print (\"DEBUG: - Link: {0}\".format(str(block_link)))\r\n print (\"DEBUG: - Query: {0}\".format(str(block_query)))\r\n\r\n# Download checklist or process from local file\r\nif checklist_file:\r\n checklist_file_list = checklist_file.split(\" \")\r\n # If --only-english parameter was supplied, take only the English version and remove duplicates\r\n if args.only_english:\r\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\r\n checklist_file_list = list(set(checklist_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # If --find-all paramater was supplied, find all the languages for the checklist\r\n if args.find_all:\r\n new_file_list = []\r\n for checklist_file in checklist_file_list:\r\n 
filedir = os.path.dirname(checklist_file)\r\n filebase = os.path.basename(checklist_file)\r\n filebase_noext = filebase[:-8] # Remove '.en.json'\r\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\r\n for checklist_match in file_match_list:\r\n # new_file_list.append(os.path.join(filedir, checklist_match))\r\n new_file_list.append(checklist_match)\r\n checklist_file_list = list(set(new_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # Go over the list(s)\r\n for checklist_file in checklist_file_list:\r\n if args.verbose:\r\n print(\"DEBUG: Opening checklist file\", checklist_file)\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Set output files\r","type":"infile"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global 
variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing 
the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n 
block_file = os.path.join(blocks_path, 'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n graph_query = item.get(\"graph\")\r\n if graph_query:\r\n query_id += 1\r\n # Create new text\r\n new_text = block_text.copy()\r\n new_text['name'] = 'querytext' + str(query_id)\r\n new_text['content']['json'] = text\r\n if link:\r\n new_text['content']['json'] += \". Check [this link](\" + link + \") for further information.\"\r\n if training:\r\n new_text['content']['json'] += \". 
[This training](\" + training + \") can help to educate yourself on this.\"\r\n # Create new query\r\n new_query = block_query.copy()\r\n new_query['name'] = 'query' + str(query_id)\r\n new_query['content']['query'] = graph_query\r\n new_query['content']['size'] = query_size\r\n # Add text and query to the workbook\r\n category_id = category_dict[category]\r\n if args.verbose:\r\n print (\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\".format(str(category_id), workbook['items'][category_id]['name']))\r\n new_new_text=json.loads(json.dumps(new_text.copy()))\r\n new_new_query=json.loads(json.dumps(new_query.copy()))\r\n workbook['items'][category_id]['content']['items'].append(new_new_text)\r\n workbook['items'][category_id]['content']['items'].append(new_new_query)\r\n\r\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\r\n if query_id > 0:\r\n workbook_string = json.dumps(workbook, indent=4)\r\n if output_file:\r\n with open(output_file, 'w', encoding='utf-8') as f:\r\n f.write(workbook_string)\r\n f.close()\r\n else:\r\n print(workbook_string)\r\n else:\r\n print(\"INFO: sorry, the analyzed checklist did not contain any graph query\")\r\n\r\ndef get_output_file(checklist_file_or_url, is_file=True):\r\n if is_file:\r\n output_file = os.path.basename(checklist_file_or_url)\r\n else:\r\n output_file = checklist_file_or_url.split('/')[-1]\r\n if args.output_file:\r\n return args.output_file\r\n elif args.output_path:\r\n # Get filename without path and extension\r\n output_file = os.path.join(args.output_path, output_file)\r\n return os.path.splitext(output_file)[0] + '_workbook.json'\r\n else:\r\n output_file = None\r\n\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# First thing of all, load the building blocks\r\nload_building_blocks()\r\nif args.verbose:\r\n print (\"DEBUG: building blocks variables intialized:\")\r\n print (\"DEBUG: - Workbook: {0}\".format(str(block_workbook)))\r\n print (\"DEBUG: - Link: {0}\".format(str(block_link)))\r\n print (\"DEBUG: - Query: {0}\".format(str(block_query)))\r\n\r\n# Download checklist or process from local file\r\nif checklist_file:\r\n checklist_file_list = checklist_file.split(\" \")\r\n # If --only-english parameter was supplied, take only the English version and remove duplicates\r\n if args.only_english:\r\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\r\n checklist_file_list = list(set(checklist_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # If --find-all paramater was supplied, find all the languages for the checklist\r\n if args.find_all:\r\n new_file_list = []\r\n for checklist_file in checklist_file_list:\r\n filedir = os.path.dirname(checklist_file)\r\n filebase = os.path.basename(checklist_file)\r\n filebase_noext = filebase[:-8] # Remove '.en.json'\r\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\r\n for checklist_match in file_match_list:\r\n # new_file_list.append(os.path.join(filedir, checklist_match))\r\n new_file_list.append(checklist_match)\r\n checklist_file_list = list(set(new_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # Go over the list(s)\r\n for checklist_file in checklist_file_list:\r\n if args.verbose:\r\n print(\"DEBUG: Opening checklist file\", checklist_file)\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as 
f:\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = 
os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = 
item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n graph_query = item.get(\"graph\")\r\n if graph_query:\r\n query_id += 1\r\n # Create new text\r\n new_text = block_text.copy()\r\n new_text['name'] = 'querytext' + str(query_id)\r\n new_text['content']['json'] = text\r\n if link:\r\n new_text['content']['json'] += \". Check [this link](\" + link + \") for further information.\"\r\n if training:\r\n new_text['content']['json'] += \". [This training](\" + training + \") can help to educate yourself on this.\"\r\n # Create new query\r\n new_query = block_query.copy()\r\n new_query['name'] = 'query' + str(query_id)\r\n new_query['content']['query'] = graph_query\r\n new_query['content']['size'] = query_size\r\n # Add text and query to the workbook\r\n category_id = category_dict[category]\r\n if args.verbose:\r\n print (\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\".format(str(category_id), workbook['items'][category_id]['name']))\r\n new_new_text=json.loads(json.dumps(new_text.copy()))\r\n new_new_query=json.loads(json.dumps(new_query.copy()))\r\n workbook['items'][category_id]['content']['items'].append(new_new_text)\r\n workbook['items'][category_id]['content']['items'].append(new_new_query)\r\n\r\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\r\n if query_id > 0:\r\n workbook_string = json.dumps(workbook, indent=4)\r\n if output_file:\r\n with open(output_file, 'w', encoding='utf-8') as f:\r\n f.write(workbook_string)\r\n f.close()\r\n else:\r\n print(workbook_string)\r\n else:\r\n print(\"INFO: sorry, the analyzed checklist did not contain any graph query\")\r\n\r\ndef get_output_file(checklist_file_or_url, is_file=True):\r\n if is_file:\r\n output_file = os.path.basename(checklist_file_or_url)\r\n else:\r\n output_file = checklist_file_or_url.split('/')[-1]\r\n if args.output_file:\r\n return args.output_file\r\n elif args.output_path:\r\n # Get filename without path and extension\r\n output_file = os.path.join(args.output_path, output_file)\r\n return os.path.splitext(output_file)[0] + '_workbook.json'\r\n else:\r\n output_file = None\r\n\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# First thing of all, load the building blocks\r\nload_building_blocks()\r\nif args.verbose:\r\n print (\"DEBUG: building blocks variables intialized:\")\r\n print (\"DEBUG: - Workbook: {0}\".format(str(block_workbook)))\r\n print (\"DEBUG: - Link: {0}\".format(str(block_link)))\r\n print (\"DEBUG: - Query: {0}\".format(str(block_query)))\r\n\r\n# Download checklist or process from local file\r\nif checklist_file:\r\n checklist_file_list = checklist_file.split(\" \")\r\n # If --only-english parameter was supplied, take only the English version and remove duplicates\r\n if args.only_english:\r\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\r\n checklist_file_list = list(set(checklist_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # If --find-all paramater was supplied, find all the languages for the checklist\r\n if args.find_all:\r\n new_file_list = []\r\n for checklist_file in checklist_file_list:\r\n filedir = os.path.dirname(checklist_file)\r\n filebase = os.path.basename(checklist_file)\r\n filebase_noext = filebase[:-8] # Remove '.en.json'\r\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\r\n for 
checklist_match in file_match_list:\r\n # new_file_list.append(os.path.join(filedir, checklist_match))\r\n new_file_list.append(checklist_match)\r\n checklist_file_list = list(set(new_file_list))\r\n if args.verbose:\r\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\r\n # Go over the list(s)\r\n for checklist_file in checklist_file_list:\r\n if args.verbose:\r\n print(\"DEBUG: Opening checklist file\", checklist_file)\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Set output files\r\n output_file = get_output_file(checklist_file, is_file=True)\r\n # Generate workbook\r\n generate_workbook(output_file, checklist_data)\r\nelse:\r\n # If no input files specified, fetch the latest from Github...\r\n if technology:\r\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\r\n else:\r\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\r\n if args.verbose:\r\n print(\"DEBUG: Downloading checklist file from\", checklist_url)\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
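
The script can only overwrite fields that the building-block JSON files actually expose. Those template files (block_workbook.json, block_link.json, block_itemgroup.json, block_query.json, block_text.json) are not reproduced in this dump; the sketch below is a hypothetical minimal shape inferred from the assignments in generate_workbook, not the real repository files, which contain many more workbook properties.

# Hypothetical minimal shapes, inferred only from the fields the script sets
block_link_example = {
    "id": "",               # overwritten with a random GUID per category
    "linkLabel": "",        # set to the category name
    "subTarget": "",        # set to 'categoryN', matching the section's visibility value
    "preText": ""           # set to the category name
}

block_itemgroup_example = {
    "name": "",                              # set to 'categoryN'
    "conditionalVisibility": {"value": ""},  # set to 'categoryN' so the tab shows when its link is selected
    "content": {
        "items": [
            {"name": "", "content": {"json": ""}}  # first item holds the '## <category>' title text
        ]
    }
}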
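generate_workbook deep-copies each section with json.loads(json.dumps(...)) instead of relying on dict.copy(). The stand-alone sketch below (not part of the script) shows why: dict.copy() is shallow, so nested objects such as conditionalVisibility would be shared between all category sections.

import json

template = {"name": "", "conditionalVisibility": {"value": ""}}

shallow_a = template.copy()
shallow_b = template.copy()
shallow_a["conditionalVisibility"]["value"] = "category1"
print(shallow_b["conditionalVisibility"]["value"])   # 'category1' -- the nested dict is shared

deep_a = json.loads(json.dumps(template))
deep_b = json.loads(json.dumps(template))
deep_a["conditionalVisibility"]["value"] = "category1"
print(deep_b["conditionalVisibility"]["value"])      # '' -- fully independent copy

copy.deepcopy from the standard library would achieve the same effect; the JSON round-trip works here because the building blocks are plain JSON-serializable dictionaries.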
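When --output-path is used, get_output_file derives the workbook filename from the checklist filename. A small sketch with hypothetical paths (the checklist name and folder are illustrative, not taken from the script):

import os

# Hypothetical inputs, mirroring get_output_file() when --output-path ./workbooks is supplied
checklist_file = 'checklists/aks_checklist.en.json'
output_path = './workbooks'

output_file = os.path.basename(checklist_file)                     # 'aks_checklist.en.json'
output_file = os.path.join(output_path, output_file)               # './workbooks/aks_checklist.en.json'
output_file = os.path.splitext(output_file)[0] + '_workbook.json'
print(output_file)                                                 # './workbooks/aks_checklist.en_workbook.json'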
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n 
if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel 
spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 
'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON 
file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building 
block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', 
dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook 
building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r\n new_section['content']['items'][0]['content']['json'] = \"## \" + category_title\r\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\r\n # Add link and query to workbook\r\n # if args.verbose:\r\n # print()\r\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\r\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\r\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\r\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\r\n # Add section to workbook\r\n new_new_section=json.loads(json.dumps(new_section.copy()))\r\n workbook['items'].append(new_new_section)\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = 
argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')
parser.add_argument('--checklist-file', dest='checklist_file', action='store',
                    help='You can optionally supply a JSON file (or a space-separated list of files) containing the checklist you want to convert into a workbook. Otherwise the latest file for the given technology will be taken from Github')
parser.add_argument('--only-english', dest='only_english', action='store_true', default=False,
                    help='if checklist files are specified, ignore the non-English ones and only generate a workbook for the English version (default: False)')
parser.add_argument('--find-all', dest='find_all', action='store_true', default=False,
                    help='if checklist files are specified, find all the languages for the given checklists (default: False)')
parser.add_argument('--technology', dest='technology', action='store',
                    help='If you do not supply a JSON file with the checklist, you need to specify the technology for which the latest checklist will be downloaded from Github')
parser.add_argument('--output-file', dest='output_file', action='store',
                    help='You can optionally supply a file where the generated workbook JSON will be saved; otherwise it is printed to the console')
parser.add_argument('--output-path', dest='output_path', action='store',
                    help='Folder where to store the results (using the same base name as the input file)')
parser.add_argument('--blocks-path', dest='blocks_path', action='store',
                    help='Folder where the building blocks used to build the workbook are stored')
parser.add_argument('--verbose', dest='verbose', action='store_true', default=False,
                    help='run in verbose mode (default: False)')
args = parser.parse_args()
checklist_file = args.checklist_file
technology = args.technology

block_workbook = None
block_link = None
block_section = None
block_query = None
block_text = None

query_size = 4  # 0: medium, 1: small, 4: tiny

# Workbook building blocks
def load_building_blocks():

    # Define the blocks as global variables
    global block_workbook
    global block_link
    global block_section
    global block_query
    global block_text

    # Set folder where to load from
    if args.blocks_path:
        blocks_path = args.blocks_path
        if args.verbose:
            print("DEBUG: Setting building block folder to {0}".format(blocks_path))
    else:
        print("ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.")
        sys.exit(1)

    # Load initial workbook building block
    block_file = os.path.join(blocks_path, 'block_workbook.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_workbook = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load link building block
    block_file = os.path.join(blocks_path, 'block_link.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_link = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load itemgroup (aka section) building block
    block_file = os.path.join(blocks_path, 'block_itemgroup.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_section = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load query building block
    block_file = os.path.join(blocks_path, 'block_query.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_query = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load text building block
    block_file = os.path.join(blocks_path, 'block_text.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_text = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)

# Main function to generate the workbook JSON
def generate_workbook(output_file, checklist_data):

    # Initialize an empty workbook
    workbook = block_workbook

    # Generate one tab in the workbook for each category
    category_id = 0
    query_id = 0
    category_dict = {}
    for item in checklist_data.get("categories"):
        category_title = item.get("name")
        category_id += 1
        category_dict[category_title] = category_id + 1  # We will use this dict later to know where to put each query
        # Create new link
        new_link = block_link.copy()
        new_link['id'] = str(uuid.uuid4())  # Random GUID
        new_link['linkLabel'] = category_title
        new_link['subTarget'] = 'category' + str(category_id)
        new_link['preText'] = category_title
        # Create new section
        new_section = block_section.copy()
        new_section['name'] = 'category' + str(category_id)
        new_section['conditionalVisibility']['value'] = 'category' + str(category_id)
        new_section['content']['items'][0]['content']['json'] = "## " + category_title
        new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'
        # Add link and section to workbook
        # if args.verbose:
        #     print("DEBUG: Adding link: {0}".format(json.dumps(new_link)))
        #     print("DEBUG: Adding section: {0}".format(json.dumps(new_section)))
        #     print("DEBUG: Workbook so far: {0}".format(json.dumps(workbook)))
        workbook['items'][1]['content']['links'].append(new_link.copy())  # I am getting crazy with Python variable references :(
        # Add section to workbook (JSON round trip to get an independent copy)
        new_new_section = json.loads(json.dumps(new_section.copy()))
        workbook['items'].append(new_new_section)

    # For each checklist item with a graph query, add a text block and a query block
    for item in checklist_data.get("items"):
        # Read variables from JSON
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        graph_query = item.get("graph")
        if graph_query:
            query_id += 1
            # Create new text
            new_text = block_text.copy()
            new_text['name'] = 'querytext' + str(query_id)
            new_text['content']['json'] = text
            if link:
                new_text['content']['json'] += ". Check [this link](" + link + ") for further information."
            if training:
                new_text['content']['json'] += ". [This training](" + training + ") can help to educate yourself on this."
            # Create new query
            new_query = block_query.copy()
            new_query['name'] = 'query' + str(query_id)
            new_query['content']['query'] = graph_query
            new_query['content']['size'] = query_size
            # Add text and query to the workbook
            category_id = category_dict[category]
            if args.verbose:
                print("DEBUG: Adding text and query to category ID {0}, workbook object name is {1}".format(str(category_id), workbook['items'][category_id]['name']))
            new_new_text = json.loads(json.dumps(new_text.copy()))
            new_new_query = json.loads(json.dumps(new_query.copy()))
            workbook['items'][category_id]['content']['items'].append(new_new_text)
            workbook['items'][category_id]['content']['items'].append(new_new_query)

    # Dump the workbook to the output file or to the console, if there was any query in the original checklist
    if query_id > 0:
        workbook_string = json.dumps(workbook, indent=4)
        if output_file:
            with open(output_file, 'w', encoding='utf-8') as f:
                f.write(workbook_string)
        else:
            print(workbook_string)
    else:
        print("INFO: sorry, the analyzed checklist did not contain any graph query")

def get_output_file(checklist_file_or_url, is_file=True):
    if is_file:
        output_file = os.path.basename(checklist_file_or_url)
    else:
        output_file = checklist_file_or_url.split('/')[-1]
    if args.output_file:
        return args.output_file
    elif args.output_path:
        # Get filename without path and extension
        output_file = os.path.join(args.output_path, output_file)
        return os.path.splitext(output_file)[0] + '_workbook.json'
    else:
        return None


########
# Main #
########

# First of all, load the building blocks
load_building_blocks()
if args.verbose:
    print("DEBUG: building block variables initialized:")
    print("DEBUG: - Workbook: {0}".format(str(block_workbook)))
    print("DEBUG: - Link: {0}".format(str(block_link)))
    print("DEBUG: - Query: {0}".format(str(block_query)))

# Download checklist or process from local file
if checklist_file:
    checklist_file_list = checklist_file.split(" ")
    # If the --only-english parameter was supplied, take only the English version and remove duplicates
    if args.only_english:
        checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]
        checklist_file_list = list(set(checklist_file_list))
        if args.verbose:
            print("DEBUG: new checklist file list:", str(checklist_file_list))
    # If the --find-all parameter was supplied, find all the languages for the checklist
    if args.find_all:
        new_file_list = []
        for checklist_file in checklist_file_list:
            filedir = os.path.dirname(checklist_file)
            filebase = os.path.basename(checklist_file)
            filebase_noext = filebase[:-8]  # Remove '.en.json'
            file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))
            for checklist_match in file_match_list:
                new_file_list.append(checklist_match)
        checklist_file_list = list(set(new_file_list))
        if args.verbose:
            print("DEBUG: new checklist file list:", str(checklist_file_list))
    # Go over the list(s)
    for checklist_file in checklist_file_list:
        if args.verbose:
            print("DEBUG: Opening checklist file", checklist_file)
        # Get JSON
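# Side note on the json.loads(json.dumps(...)) pattern used in generate_workbook() above:
# dict.copy() is a shallow copy, so nested structures such as new_section['content']['items']
# would otherwise be shared between every appended section. A minimal, self-contained sketch
# of the difference, using made-up data (not part of the original script):
import copy
import json

template = {'name': 'section', 'content': {'items': [{'json': '## title'}]}}

shallow = template.copy()                        # copies the outer dict only; nested objects are shared
shallow['content']['items'][0]['json'] = '## changed'
print(template['content']['items'][0]['json'])   # prints '## changed': the template was mutated too

template['content']['items'][0]['json'] = '## title'   # reset
deep = json.loads(json.dumps(template))          # JSON round trip yields a fully independent copy
deep['content']['items'][0]['json'] = '## changed'
print(template['content']['items'][0]['json'])   # prints '## title': the template is untouched

# copy.deepcopy(template) achieves the same result without serializing to a string
independent = copy.deepcopy(template)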
Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r","type":"random"},{"content":"######################################################################\r\n#\r\n# This script reads the checklist items from the latest checklist file\r\n# in Github (or from a local file) and generates an Azure Monitor\r\n# workbook in JSON format.\r\n# \r\n# Last updated: February 2023\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport 
os\r\nimport requests\r\nimport glob\r\nimport uuid\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\r\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\r\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\r\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\r\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\r\nparser.add_argument('--technology', dest='technology', action='store',\r\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\r\nparser.add_argument('--output-file', dest='output_file', action='store',\r\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\r\nparser.add_argument('--output-path', dest='output_path', action='store',\r\n help='Folder where to store the results (using the same name as the input_file)')\r\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\r\n help='Folder where the building blocks to build the workbook are stored)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\nchecklist_file = args.checklist_file\r\ntechnology = args.technology\r\n\r\nblock_workbook = None\r\nblock_link = None\r\nblock_section = None\r\nblock_query = None\r\nblock_text = None\r\n\r\nquery_size = 4 # 0: medium, 1: small, 4: tiny\r\n\r\n# Workbook building blocks\r\ndef load_building_blocks():\r\n\r\n # Define the blocks as global variables\r\n global block_workbook\r\n global block_link\r\n global block_section\r\n global block_query\r\n global block_text\r\n\r\n # Set folder where to load from\r\n if args.blocks_path:\r\n blocks_path = args.blocks_path\r\n if args.verbose:\r\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\r\n else:\r\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\r\n sys.exit(1)\r\n\r\n # Load initial workbook building block\r\n block_file = os.path.join(blocks_path, 'block_workbook.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_workbook = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load link building block\r\n block_file = os.path.join(blocks_path, 'block_link.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_link = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load itemgroup (aka section) building block\r\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\r\n if 
args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_section = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load query building block\r\n block_file = os.path.join(blocks_path, 'block_query.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_query = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n # Load text building block\r\n block_file = os.path.join(blocks_path, 'block_text.json')\r\n if args.verbose:\r\n print (\"DEBUG: Loading file {0}...\".format(block_file))\r\n try:\r\n with open(block_file) as f:\r\n block_text = json.load(f)\r\n except Exception as e:\r\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\r\n sys.exit(0)\r\n\r\n# Main function to generate the workbook JSON\r\ndef generate_workbook(output_file, checklist_data):\r\n\r\n # Initialize an empty workbook\r\n workbook = block_workbook\r\n\r\n # Generate one tab in the workbook for each category\r\n category_id = 0\r\n query_id = 0\r\n category_dict = {}\r\n for item in checklist_data.get(\"categories\"):\r\n category_title = item.get(\"name\")\r\n category_id += 1\r\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\r\n # Create new link\r\n new_link = block_link.copy()\r\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\r\n new_link['linkLabel'] = category_title\r\n new_link['subTarget'] = 'category' + str(category_id)\r\n new_link['preText'] = category_title\r\n # Create new section\r\n new_section = block_section.copy()\r\n new_section['name'] = 'category' + str(category_id)\r\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\r","type":"random"}],"string":"[\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error 
when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n graph_query = item.get(\\\"graph\\\")\\r\\n if graph_query:\\r\\n query_id += 1\\r\\n # Create new text\\r\\n new_text = block_text.copy()\\r\\n new_text['name'] = 'querytext' + str(query_id)\\r\\n new_text['content']['json'] = text\\r\\n if link:\\r\\n new_text['content']['json'] += \\\". 
Check [this link](\\\" + link + \\\") for further information.\\\"\\r\\n if training:\\r\\n new_text['content']['json'] += \\\". [This training](\\\" + training + \\\") can help to educate yourself on this.\\\"\\r\\n # Create new query\\r\\n new_query = block_query.copy()\\r\\n new_query['name'] = 'query' + str(query_id)\\r\\n new_query['content']['query'] = graph_query\\r\\n new_query['content']['size'] = query_size\\r\\n # Add text and query to the workbook\\r\\n category_id = category_dict[category]\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\\\".format(str(category_id), workbook['items'][category_id]['name']))\\r\\n new_new_text=json.loads(json.dumps(new_text.copy()))\\r\\n new_new_query=json.loads(json.dumps(new_query.copy()))\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_text)\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_query)\\r\\n\\r\\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\\r\\n if query_id > 0:\\r\\n workbook_string = json.dumps(workbook, indent=4)\\r\\n if output_file:\\r\\n with open(output_file, 'w', encoding='utf-8') as f:\\r\\n f.write(workbook_string)\\r\\n f.close()\\r\\n else:\\r\\n print(workbook_string)\\r\\n else:\\r\\n print(\\\"INFO: sorry, the analyzed checklist did not contain any graph query\\\")\\r\\n\\r\\ndef get_output_file(checklist_file_or_url, is_file=True):\\r\\n if is_file:\\r\\n output_file = os.path.basename(checklist_file_or_url)\\r\\n else:\\r\\n output_file = checklist_file_or_url.split('/')[-1]\\r\\n if args.output_file:\\r\\n return args.output_file\\r\\n elif args.output_path:\\r\\n # Get filename without path and extension\\r\\n output_file = os.path.join(args.output_path, output_file)\\r\\n return os.path.splitext(output_file)[0] + '_workbook.json'\\r\\n else:\\r\\n output_file = None\\r\\n\\r\\n\\r\\n########\\r\\n# Main #\\r\\n########\\r\\n\\r\\n# First thing of all, load the building blocks\\r\",\n \"type\": \"infile\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
######################################################################
#
# This script reads the checklist items from the latest checklist file
# in Github (or from a local file) and generates an Azure Monitor
# workbook in JSON format.
#
# Last updated: February 2023
#
######################################################################

import json
import argparse
import sys
import os
import requests
import glob
import uuid

# Get input arguments
parser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')
parser.add_argument('--checklist-file', dest='checklist_file', action='store',
                    help='You can optionally supply a JSON file containing the checklist you want to convert to a workbook. Otherwise the latest file is taken from Github')
parser.add_argument('--only-english', dest='only_english', action='store_true', default=False,
                    help='if checklist files are specified, ignore the non-English ones and only process the English version (default: False)')
parser.add_argument('--find-all', dest='find_all', action='store_true', default=False,
                    help='if checklist files are specified, find all the languages for the given checklists (default: False)')
parser.add_argument('--technology', dest='technology', action='store',
                    help='If you do not supply a JSON file with the checklist, you need to specify the technology for which the latest checklist will be downloaded from Github')
parser.add_argument('--output-file', dest='output_file', action='store',
                    help='You can optionally supply a JSON file where the generated workbook will be saved; otherwise the name is derived from --output-path, or the workbook is printed to the console')
parser.add_argument('--output-path', dest='output_path', action='store',
                    help='Folder where to store the results (using the same name as the input file)')
parser.add_argument('--blocks-path', dest='blocks_path', action='store',
                    help='Folder where the building blocks to build the workbook are stored')
parser.add_argument('--verbose', dest='verbose', action='store_true', default=False,
                    help='run in verbose mode (default: False)')
args = parser.parse_args()
checklist_file = args.checklist_file
technology = args.technology

block_workbook = None
block_link = None
block_section = None
block_query = None
block_text = None

query_size = 4  # 0: medium, 1: small, 4: tiny

# Load the workbook building blocks from the folder given in --blocks-path
def load_building_blocks():

    # Define the blocks as global variables
    global block_workbook
    global block_link
    global block_section
    global block_query
    global block_text

    # Set folder where to load from
    if args.blocks_path:
        blocks_path = args.blocks_path
        if args.verbose:
            print("DEBUG: Setting building block folder to {0}".format(blocks_path))
    else:
        print("ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.")
        sys.exit(1)

    # Load initial workbook building block
    block_file = os.path.join(blocks_path, 'block_workbook.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_workbook = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(1)
    # Load link building block
    block_file = os.path.join(blocks_path, 'block_link.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_link = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(1)
    # Load itemgroup (aka section) building block
    block_file = os.path.join(blocks_path, 'block_itemgroup.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_section = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(1)
    # Load query building block
    block_file = os.path.join(blocks_path, 'block_query.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_query = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(1)
    # Load text building block
    block_file = os.path.join(blocks_path, 'block_text.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_text = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(1)

# Main function to generate the workbook JSON
def generate_workbook(output_file, checklist_data):

    # Start from a deep copy of the workbook building block, so that repeated invocations do not share state
    workbook = json.loads(json.dumps(block_workbook))

    # Generate one tab in the workbook for each category
    category_id = 0
    query_id = 0
    category_dict = {}
    for item in checklist_data.get("categories"):
        category_title = item.get("name")
        category_id += 1
        category_dict[category_title] = category_id + 1  # We will use this dict later to know where to put each query
        # Create new link
        new_link = block_link.copy()
        new_link['id'] = str(uuid.uuid4())  # Random GUID
        new_link['linkLabel'] = category_title
        new_link['subTarget'] = 'category' + str(category_id)
        new_link['preText'] = category_title
        # Create new section
        new_section = block_section.copy()
        new_section['name'] = 'category' + str(category_id)
        new_section['conditionalVisibility']['value'] = 'category' + str(category_id)
        new_section['content']['items'][0]['content']['json'] = "## " + category_title
        new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'
        # Add link and section to the workbook (deep copy the section, to avoid surprises with Python references)
        workbook['items'][1]['content']['links'].append(new_link.copy())
        new_new_section = json.loads(json.dumps(new_section))
        workbook['items'].append(new_new_section)

    # For each checklist item with a graph query, add a text block and a query to the matching category section
    for item in checklist_data.get("items"):
        # Read variables from JSON
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        graph_query = item.get("graph")
        if graph_query:
            query_id += 1
            # Create new text
            new_text = block_text.copy()
            new_text['name'] = 'querytext' + str(query_id)
            new_text['content']['json'] = text
            if link:
                new_text['content']['json'] += ". Check [this link](" + link + ") for further information."
            if training:
                new_text['content']['json'] += ". [This training](" + training + ") can help to educate yourself on this."
            # Create new query
            new_query = block_query.copy()
            new_query['name'] = 'query' + str(query_id)
            new_query['content']['query'] = graph_query
            new_query['content']['size'] = query_size
            # Add text and query to the workbook
            category_id = category_dict[category]
            if args.verbose:
                print("DEBUG: Adding text and query to category ID {0}, workbook object name is {1}".format(str(category_id), workbook['items'][category_id]['name']))
            new_new_text = json.loads(json.dumps(new_text))
            new_new_query = json.loads(json.dumps(new_query))
            workbook['items'][category_id]['content']['items'].append(new_new_text)
            workbook['items'][category_id]['content']['items'].append(new_new_query)

    # Dump the workbook to the output file or to console, if there was any query in the original checklist
    if query_id > 0:
        workbook_string = json.dumps(workbook, indent=4)
        if output_file:
            with open(output_file, 'w', encoding='utf-8') as f:
                f.write(workbook_string)
        else:
            print(workbook_string)
    else:
        print("INFO: sorry, the analyzed checklist did not contain any graph query")

# Derive the output file name from the input file (or URL) and the --output-file/--output-path arguments
def get_output_file(checklist_file_or_url, is_file=True):
    if is_file:
        output_file = os.path.basename(checklist_file_or_url)
    else:
        output_file = checklist_file_or_url.split('/')[-1]
    if args.output_file:
        return args.output_file
    elif args.output_path:
        # Get filename without path and extension
        output_file = os.path.join(args.output_path, output_file)
        return os.path.splitext(output_file)[0] + '_workbook.json'
    else:
        return None


########
# Main #
########

# First of all, load the building blocks
load_building_blocks()
if args.verbose:
    print("DEBUG: building block variables initialized:")
    print("DEBUG: - Workbook: {0}".format(str(block_workbook)))
    print("DEBUG: - Link: {0}".format(str(block_link)))
    print("DEBUG: - Query: {0}".format(str(block_query)))

# Download checklist or process from local file
if checklist_file:
    checklist_file_list = checklist_file.split(" ")
    # If the --only-english parameter was supplied, take only the English version and remove duplicates
    if args.only_english:
        checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]
        checklist_file_list = list(set(checklist_file_list))
        if args.verbose:
            print("DEBUG: new checklist file list:", str(checklist_file_list))
    # If the --find-all parameter was supplied, find all the languages for the checklist
    if args.find_all:
        new_file_list = []
        for checklist_file in checklist_file_list:
            filedir = os.path.dirname(checklist_file)
            filebase = os.path.basename(checklist_file)
            filebase_noext = filebase[:-8]  # Remove '.en.json'
            file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))
            for checklist_match in file_match_list:
                new_file_list.append(checklist_match)
        checklist_file_list = list(set(new_file_list))
        if args.verbose:
            print("DEBUG: new checklist file list:", str(checklist_file_list))
    # Go over the list(s)
    for checklist_file in checklist_file_list:
        if args.verbose:
            print("DEBUG: Opening checklist file", checklist_file)
        # Get JSON
        try:
            with open(checklist_file) as f:
                checklist_data = json.load(f)
        except Exception as e:
            print("ERROR: Error when processing JSON file", checklist_file, "-", str(e))
            sys.exit(1)
        # Set output file
        output_file = get_output_file(checklist_file, is_file=True)
        # Generate workbook
        generate_workbook(output_file, checklist_data)
else:
    # If no input files were specified, fetch the latest from Github
    if technology:
        checklist_url = "https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/" + technology + "_checklist.en.json"
    else:
        checklist_url = "https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json"
    if args.verbose:
        print("DEBUG: Downloading checklist file from", checklist_url)
    response = requests.get(checklist_url)
    # If download was successful
    if response.status_code == 200:
        if args.verbose:
            print("DEBUG: File {0} downloaded successfully".format(checklist_url))
        try:
            # Deserialize JSON to object variable
            checklist_data = json.loads(response.text)
        except Exception as e:
            print("Error deserializing JSON content: {0}".format(str(e)))
            sys.exit(1)
        # Set output file and generate the workbook, mirroring the local-file branch above
        output_file = get_output_file(checklist_url, is_file=False)
        generate_workbook(output_file, checklist_data)
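For reference, generate_workbook only relies on a handful of fields from the checklist JSON: a "categories" list whose entries carry a "name", and an "items" list whose entries may carry "guid", "category", "subcategory", "text", "description", "severity", "link", "training" and "graph". A minimal input that exercises the query path could therefore look like the sketch below; the field names come from the code above, while every value (GUID, URLs, query text) is made up purely for illustration.

# Illustrative only: a minimal checklist structure with the fields the script reads.
sample_checklist = {
    "categories": [
        {"name": "Security"}
    ],
    "items": [
        {
            "guid": "00000000-0000-0000-0000-000000000000",  # placeholder value
            "category": "Security",
            "subcategory": "Identity",
            "text": "Use managed identities where possible",
            "severity": "High",
            "link": "https://learn.microsoft.com/...",        # appended to the text block as a Markdown link if present
            "training": "https://learn.microsoft.com/...",     # same, as a training link
            "graph": "resources | where type == 'microsoft.compute/virtualmachines' | project name"
        }
    ]
}

Items without a "graph" query are skipped entirely, which is why a checklist with no queries at all only produces the INFO message instead of a workbook.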
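The building-block files themselves (block_workbook.json, block_link.json, block_itemgroup.json, block_query.json, block_text.json) are not shown here, but the way the script indexes into them implies a minimal shape. The sketch below is an assumption inferred only from those accesses; the real files are full Azure Workbooks JSON fragments with many more properties.

# Assumed minimal shapes, inferred from how generate_workbook() reads and writes these objects.
block_workbook = {"items": [{}, {"content": {"links": []}}]}   # items[1] holds the category tab links
block_link     = {"id": "", "linkLabel": "", "subTarget": "", "preText": ""}
block_section  = {"name": "", "conditionalVisibility": {"value": ""},
                  "content": {"items": [{"name": "", "content": {"json": ""}}]}}
block_query    = {"name": "", "content": {"query": "", "size": 0}}
block_text     = {"name": "", "content": {"json": ""}}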
when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error 
when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error 
when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n graph_query = item.get(\\\"graph\\\")\\r\\n if graph_query:\\r\\n query_id += 1\\r\\n # Create new text\\r\\n new_text = block_text.copy()\\r\\n new_text['name'] = 'querytext' + str(query_id)\\r\\n new_text['content']['json'] = text\\r\\n if link:\\r\\n new_text['content']['json'] += \\\". 
Check [this link](\\\" + link + \\\") for further information.\\\"\\r\\n if training:\\r\\n new_text['content']['json'] += \\\". [This training](\\\" + training + \\\") can help to educate yourself on this.\\\"\\r\\n # Create new query\\r\\n new_query = block_query.copy()\\r\\n new_query['name'] = 'query' + str(query_id)\\r\\n new_query['content']['query'] = graph_query\\r\\n new_query['content']['size'] = query_size\\r\\n # Add text and query to the workbook\\r\\n category_id = category_dict[category]\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\\\".format(str(category_id), workbook['items'][category_id]['name']))\\r\\n new_new_text=json.loads(json.dumps(new_text.copy()))\\r\\n new_new_query=json.loads(json.dumps(new_query.copy()))\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_text)\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_query)\\r\\n\\r\\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\\r\\n if query_id > 0:\\r\\n workbook_string = json.dumps(workbook, indent=4)\\r\\n if output_file:\\r\\n with open(output_file, 'w', encoding='utf-8') as f:\\r\\n f.write(workbook_string)\\r\\n f.close()\\r\\n else:\\r\\n print(workbook_string)\\r\\n else:\\r\\n print(\\\"INFO: sorry, the analyzed checklist did not contain any graph query\\\")\\r\\n\\r\\ndef get_output_file(checklist_file_or_url, is_file=True):\\r\\n if is_file:\\r\\n output_file = os.path.basename(checklist_file_or_url)\\r\\n else:\\r\\n output_file = checklist_file_or_url.split('/')[-1]\\r\\n if args.output_file:\\r\\n return args.output_file\\r\\n elif args.output_path:\\r\\n # Get filename without path and extension\\r\\n output_file = os.path.join(args.output_path, output_file)\\r\\n return os.path.splitext(output_file)[0] + '_workbook.json'\\r\\n else:\\r\\n output_file = None\\r\\n\\r\\n\\r\\n########\\r\\n# Main #\\r\\n########\\r\\n\\r\\n# First thing of all, load the building blocks\\r\\nload_building_blocks()\\r\\nif args.verbose:\\r\\n print (\\\"DEBUG: building blocks variables intialized:\\\")\\r\\n print (\\\"DEBUG: - Workbook: {0}\\\".format(str(block_workbook)))\\r\\n print (\\\"DEBUG: - Link: {0}\\\".format(str(block_link)))\\r\\n print (\\\"DEBUG: - Query: {0}\\\".format(str(block_query)))\\r\\n\\r\\n# Download checklist or process from local file\\r\\nif checklist_file:\\r\\n checklist_file_list = checklist_file.split(\\\" \\\")\\r\\n # If --only-english parameter was supplied, take only the English version and remove duplicates\\r\\n if args.only_english:\\r\\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\\r\\n checklist_file_list = list(set(checklist_file_list))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: new checklist file list:\\\", str(checklist_file_list))\\r\\n # If --find-all paramater was supplied, find all the languages for the checklist\\r\\n if args.find_all:\\r\\n new_file_list = []\\r\\n for checklist_file in checklist_file_list:\\r\\n filedir = os.path.dirname(checklist_file)\\r\\n filebase = os.path.basename(checklist_file)\\r\\n filebase_noext = filebase[:-8] # Remove '.en.json'\\r\\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\\r\\n for checklist_match in file_match_list:\\r\\n # new_file_list.append(os.path.join(filedir, checklist_match))\\r\\n new_file_list.append(checklist_match)\\r\\n checklist_file_list = list(set(new_file_list))\\r\\n if 
args.verbose:\\r\\n print(\\\"DEBUG: new checklist file list:\\\", str(checklist_file_list))\\r\\n # Go over the list(s)\\r\\n for checklist_file in checklist_file_list:\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: Opening checklist file\\\", checklist_file)\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building 
blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: 
{0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n graph_query = item.get(\\\"graph\\\")\\r\\n if graph_query:\\r\\n query_id += 1\\r\\n # Create new text\\r\\n new_text = block_text.copy()\\r\\n new_text['name'] = 'querytext' + str(query_id)\\r\\n new_text['content']['json'] = text\\r\\n if link:\\r\\n new_text['content']['json'] += \\\". Check [this link](\\\" + link + \\\") for further information.\\\"\\r\\n if training:\\r\\n new_text['content']['json'] += \\\". [This training](\\\" + training + \\\") can help to educate yourself on this.\\\"\\r\\n # Create new query\\r\\n new_query = block_query.copy()\\r\\n new_query['name'] = 'query' + str(query_id)\\r\\n new_query['content']['query'] = graph_query\\r\\n new_query['content']['size'] = query_size\\r\\n # Add text and query to the workbook\\r\\n category_id = category_dict[category]\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\\\".format(str(category_id), workbook['items'][category_id]['name']))\\r\\n new_new_text=json.loads(json.dumps(new_text.copy()))\\r\\n new_new_query=json.loads(json.dumps(new_query.copy()))\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_text)\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_query)\\r\\n\\r\\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\\r\\n if query_id > 0:\\r\\n workbook_string = json.dumps(workbook, indent=4)\\r\\n if output_file:\\r\\n with open(output_file, 'w', encoding='utf-8') as f:\\r\\n f.write(workbook_string)\\r\\n f.close()\\r\\n else:\\r\\n print(workbook_string)\\r\\n else:\\r\\n print(\\\"INFO: sorry, the analyzed checklist did not contain any graph query\\\")\\r\\n\\r\\ndef get_output_file(checklist_file_or_url, is_file=True):\\r\\n if is_file:\\r\\n output_file = os.path.basename(checklist_file_or_url)\\r\\n else:\\r\\n output_file = checklist_file_or_url.split('/')[-1]\\r\\n if args.output_file:\\r\\n return args.output_file\\r\\n elif args.output_path:\\r\\n # Get filename without path and extension\\r\\n output_file = os.path.join(args.output_path, output_file)\\r\\n return os.path.splitext(output_file)[0] + '_workbook.json'\\r\\n else:\\r\\n output_file = None\\r\\n\\r\\n\\r\\n########\\r\\n# Main #\\r\\n########\\r\\n\\r\\n# First thing of all, load the building blocks\\r\\nload_building_blocks()\\r\\nif args.verbose:\\r\\n print (\\\"DEBUG: building blocks variables intialized:\\\")\\r\\n print (\\\"DEBUG: - Workbook: {0}\\\".format(str(block_workbook)))\\r\\n print (\\\"DEBUG: - Link: 
{0}\\\".format(str(block_link)))\\r\\n print (\\\"DEBUG: - Query: {0}\\\".format(str(block_query)))\\r\\n\\r\\n# Download checklist or process from local file\\r\\nif checklist_file:\\r\\n checklist_file_list = checklist_file.split(\\\" \\\")\\r\\n # If --only-english parameter was supplied, take only the English version and remove duplicates\\r\\n if args.only_english:\\r\\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\\r\\n checklist_file_list = list(set(checklist_file_list))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: new checklist file list:\\\", str(checklist_file_list))\\r\\n # If --find-all paramater was supplied, find all the languages for the checklist\\r\\n if args.find_all:\\r\\n new_file_list = []\\r\\n for checklist_file in checklist_file_list:\\r\\n filedir = os.path.dirname(checklist_file)\\r\\n filebase = os.path.basename(checklist_file)\\r\\n filebase_noext = filebase[:-8] # Remove '.en.json'\\r\\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\\r\\n for checklist_match in file_match_list:\\r\\n # new_file_list.append(os.path.join(filedir, checklist_match))\\r\\n new_file_list.append(checklist_match)\\r\\n checklist_file_list = list(set(new_file_list))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: new checklist file list:\\\", str(checklist_file_list))\\r\\n # Go over the list(s)\\r\\n for checklist_file in checklist_file_list:\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: Opening checklist file\\\", checklist_file)\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Set output files\\r\\n output_file = get_output_file(checklist_file, is_file=True)\\r\\n # Generate workbook\\r\\n generate_workbook(output_file, checklist_data)\\r\\nelse:\\r\\n # If no input files specified, fetch the latest from Github...\\r\\n if technology:\\r\\n checklist_url = \\\"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\\\" + technology + \\\"_checklist.en.json\\\"\\r\\n else:\\r\\n checklist_url = \\\"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: Downloading checklist file from\\\", checklist_url)\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
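The new_new_section / new_new_text / new_new_query assignments above exist because dict.copy() is only a shallow copy: the nested content dictionaries would still be shared with the building-block templates, so every appended section would end up pointing at the same inner objects. The script works around this with a json.dumps/json.loads round trip. Below is a minimal sketch of the same idea; the placeholder dict is invented for illustration, and copy.deepcopy is shown as an equivalent standard-library alternative, not something the script itself calls.

import copy
import json

# Hypothetical building block, standing in for block_section / block_text / block_query
block = {"content": {"items": [{"name": "template"}]}}

shallow = block.copy()                    # nested dicts are still shared with the template
via_json = json.loads(json.dumps(block))  # deep copy via a JSON round trip (the script's approach)
via_copy = copy.deepcopy(block)           # deep copy via the standard library

shallow["content"]["items"][0]["name"] = "changed"
print(block["content"]["items"][0]["name"])     # "changed": the template was mutated through the shallow copy
print(via_json["content"]["items"][0]["name"])  # "template": the deep copies are unaffected
print(via_copy["content"]["items"][0]["name"])  # "template"

The JSON round trip only works because the building blocks are plain JSON-serializable dicts; copy.deepcopy would be the more general choice, but either avoids the shared-reference problem.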
new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook 
building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: 
{0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global 
block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + 
str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error 
when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 
2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n 
try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = 
item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = 
os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # 
print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n 
global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n 
######################################################################
#
# This script reads the checklist items from the latest checklist file
# in Github (or from a local file) and generates an Azure Monitor
# workbook in JSON format.
#
# Last updated: February 2023
#
######################################################################

import json
import argparse
import sys
import os
import requests
import glob
import uuid

# Get input arguments
parser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')
parser.add_argument('--checklist-file', dest='checklist_file', action='store',
                    help='You can optionally supply a JSON file containing the checklist you want to convert to a workbook. Otherwise it will take the latest file from Github')
parser.add_argument('--only-english', dest='only_english', action='store_true', default=False,
                    help='if checklist files are specified, ignore the non-English ones and only generate a workbook for the English version (default: False)')
parser.add_argument('--find-all', dest='find_all', action='store_true', default=False,
                    help='if checklist files are specified, find all the languages for the given checklists (default: False)')
parser.add_argument('--technology', dest='technology', action='store',
                    help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')
parser.add_argument('--output-file', dest='output_file', action='store',
                    help='You can optionally supply the JSON file where the workbook will be saved, otherwise it is printed to the console')
parser.add_argument('--output-path', dest='output_path', action='store',
                    help='Folder where to store the results (using the same name as the input file)')
parser.add_argument('--blocks-path', dest='blocks_path', action='store',
                    help='Folder where the building blocks to build the workbook are stored')
parser.add_argument('--verbose', dest='verbose', action='store_true', default=False,
                    help='run in verbose mode (default: False)')
args = parser.parse_args()
checklist_file = args.checklist_file
technology = args.technology

block_workbook = None
block_link = None
block_section = None
block_query = None
block_text = None

query_size = 4  # 0: medium, 1: small, 4: tiny

# Workbook building blocks
def load_building_blocks():

    # Define the blocks as global variables
    global block_workbook
    global block_link
    global block_section
    global block_query
    global block_text

    # Set folder where to load from
    if args.blocks_path:
        blocks_path = args.blocks_path
        if args.verbose:
            print("DEBUG: Setting building block folder to {0}".format(blocks_path))
    else:
        print("ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.")
        sys.exit(1)

    # Load initial workbook building block
    block_file = os.path.join(blocks_path, 'block_workbook.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_workbook = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load link building block
    block_file = os.path.join(blocks_path, 'block_link.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_link = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load itemgroup (aka section) building block
    block_file = os.path.join(blocks_path, 'block_itemgroup.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_section = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load query building block
    block_file = os.path.join(blocks_path, 'block_query.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_query = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)
    # Load text building block
    block_file = os.path.join(blocks_path, 'block_text.json')
    if args.verbose:
        print("DEBUG: Loading file {0}...".format(block_file))
    try:
        with open(block_file) as f:
            block_text = json.load(f)
    except Exception as e:
        print("ERROR: Error when opening JSON workbook building block", block_file, "-", str(e))
        sys.exit(0)

# Main function to generate the workbook JSON
def generate_workbook(output_file, checklist_data):

    # Initialize an empty workbook
    workbook = block_workbook

    # Generate one tab in the workbook for each category
    category_id = 0
    query_id = 0
    category_dict = {}
    for item in checklist_data.get("categories"):
        category_title = item.get("name")
        category_id += 1
        category_dict[category_title] = category_id + 1  # We will use this dict later to know where to put each query
        # Create new link
        new_link = block_link.copy()
        new_link['id'] = str(uuid.uuid4())  # Random GUID
        new_link['linkLabel'] = category_title
        new_link['subTarget'] = 'category' + str(category_id)
        new_link['preText'] = category_title
        # Create new section
        new_section = block_section.copy()
        new_section['name'] = 'category' + str(category_id)
        new_section['conditionalVisibility']['value'] = 'category' + str(category_id)
        new_section['content']['items'][0]['content']['json'] = "## " + category_title
        new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'
        # Add link and section to the workbook
        # if args.verbose:
        #     print()
        #     print("DEBUG: Adding link: {0}".format(json.dumps(new_link)))
        #     print("DEBUG: Adding section: {0}".format(json.dumps(new_section)))
        #     print("DEBUG: Workbook so far: {0}".format(json.dumps(workbook)))
        workbook['items'][1]['content']['links'].append(new_link.copy())  # I am getting crazy with Python variable references :(
        # Add section to workbook
        new_new_section = json.loads(json.dumps(new_section.copy()))
        workbook['items'].append(new_new_section)

    # For each checklist item with a graph query, add a text and a query element to the workbook
    for item in checklist_data.get("items"):
        # Read variables from JSON
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        graph_query = item.get("graph")
        if graph_query:
            query_id += 1
            # Create new text
            new_text = block_text.copy()
            new_text['name'] = 'querytext' + str(query_id)
            new_text['content']['json'] = text
            if link:
                new_text['content']['json'] += ". Check [this link](" + link + ") for further information."
            if training:
                new_text['content']['json'] += ". [This training](" + training + ") can help to educate yourself on this."
            # Create new query
            new_query = block_query.copy()
            new_query['name'] = 'query' + str(query_id)
            new_query['content']['query'] = graph_query
            new_query['content']['size'] = query_size
            # Add text and query to the workbook
            category_id = category_dict[category]
            if args.verbose:
                print("DEBUG: Adding text and query to category ID {0}, workbook object name is {1}".format(str(category_id), workbook['items'][category_id]['name']))
            new_new_text = json.loads(json.dumps(new_text.copy()))
            new_new_query = json.loads(json.dumps(new_query.copy()))
            workbook['items'][category_id]['content']['items'].append(new_new_text)
            workbook['items'][category_id]['content']['items'].append(new_new_query)

    # Dump the workbook to the output file or to the console, if there was any query in the original checklist
    if query_id > 0:
        workbook_string = json.dumps(workbook, indent=4)
        if output_file:
            with open(output_file, 'w', encoding='utf-8') as f:
                f.write(workbook_string)
        else:
            print(workbook_string)
    else:
        print("INFO: sorry, the analyzed checklist did not contain any graph query")

def get_output_file(checklist_file_or_url, is_file=True):
    if is_file:
        output_file = os.path.basename(checklist_file_or_url)
    else:
        output_file = checklist_file_or_url.split('/')[-1]
    if args.output_file:
        return args.output_file
    elif args.output_path:
        # Get filename without path and extension
        output_file = os.path.join(args.output_path, output_file)
        return os.path.splitext(output_file)[0] + '_workbook.json'
    else:
        return None


########
# Main #
########

# First of all, load the building blocks
load_building_blocks()
if args.verbose:
    print("DEBUG: building blocks variables initialized:")
    print("DEBUG: - Workbook: {0}".format(str(block_workbook)))
    print("DEBUG: - Link: {0}".format(str(block_link)))
    print("DEBUG: - Query: {0}".format(str(block_query)))

# Download checklist or process from local file
if checklist_file:
    checklist_file_list = checklist_file.split(" ")
    # If the --only-english parameter was supplied, take only the English version and remove duplicates
    if args.only_english:
        checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]
        checklist_file_list = list(set(checklist_file_list))
        if args.verbose:
            print("DEBUG: new checklist file list:", str(checklist_file_list))
    # If the --find-all parameter was supplied, find all the languages for the checklist
    if args.find_all:
        new_file_list = []
        for checklist_file in checklist_file_list:
            filedir = os.path.dirname(checklist_file)
            filebase = os.path.basename(checklist_file)
            filebase_noext = filebase[:-8]  # Remove '.en.json'
            file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))
            for checklist_match in file_match_list:
                # new_file_list.append(os.path.join(filedir, checklist_match))
                new_file_list.append(checklist_match)
        checklist_file_list = list(set(new_file_list))
        if args.verbose:
            print("DEBUG: new checklist file list:", str(checklist_file_list))
    # Go over the list(s)
    for checklist_file in checklist_file_list:
        if args.verbose:
            print("DEBUG: Opening checklist file", checklist_file)
        # Get JSON
sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON 
workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\\n new_section['content']['items'][0]['content']['json'] = \\\"## \\\" + category_title\\r\\n new_section['content']['items'][0]['name'] = 'category' + str(category_id) + 'title'\\r\\n # Add link and query to workbook\\r\\n # if args.verbose:\\r\\n # print()\\r\\n # print (\\\"DEBUG: Adding link: {0}\\\".format(json.dumps(new_link)))\\r\\n # print (\\\"DEBUG: Adding section: {0}\\\".format(json.dumps(new_section)))\\r\\n # print(\\\"DEBUG: Workbook so far: {0}\\\".format(json.dumps(workbook)))\\r\\n workbook['items'][1]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\\r\\n # Add section to workbook\\r\\n new_new_section=json.loads(json.dumps(new_section.copy()))\\r\\n workbook['items'].append(new_new_section)\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity 
= item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n graph_query = item.get(\\\"graph\\\")\\r\\n if graph_query:\\r\\n query_id += 1\\r\\n # Create new text\\r\\n new_text = block_text.copy()\\r\\n new_text['name'] = 'querytext' + str(query_id)\\r\\n new_text['content']['json'] = text\\r\\n if link:\\r\\n new_text['content']['json'] += \\\". Check [this link](\\\" + link + \\\") for further information.\\\"\\r\\n if training:\\r\\n new_text['content']['json'] += \\\". [This training](\\\" + training + \\\") can help to educate yourself on this.\\\"\\r\\n # Create new query\\r\\n new_query = block_query.copy()\\r\\n new_query['name'] = 'query' + str(query_id)\\r\\n new_query['content']['query'] = graph_query\\r\\n new_query['content']['size'] = query_size\\r\\n # Add text and query to the workbook\\r\\n category_id = category_dict[category]\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Adding text and query to category ID {0}, workbook object name is {1}\\\".format(str(category_id), workbook['items'][category_id]['name']))\\r\\n new_new_text=json.loads(json.dumps(new_text.copy()))\\r\\n new_new_query=json.loads(json.dumps(new_query.copy()))\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_text)\\r\\n workbook['items'][category_id]['content']['items'].append(new_new_query)\\r\\n\\r\\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\\r\\n if query_id > 0:\\r\\n workbook_string = json.dumps(workbook, indent=4)\\r\",\n \"type\": \"random\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\",\n \"type\": \"random\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script reads the checklist items from the latest checklist file\\r\\n# in Github (or from a local file) and generates an Azure Monitor\\r\\n# workbook in 
JSON format.\\r\\n# \\r\\n# Last updated: February 2023\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport uuid\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\\r\\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\\r\\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\\r\\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\\r\\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\\r\\nparser.add_argument('--technology', dest='technology', action='store',\\r\\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\\r\\nparser.add_argument('--output-file', dest='output_file', action='store',\\r\\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\\r\\nparser.add_argument('--output-path', dest='output_path', action='store',\\r\\n help='Folder where to store the results (using the same name as the input_file)')\\r\\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\\r\\n help='Folder where the building blocks to build the workbook are stored)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\nchecklist_file = args.checklist_file\\r\\ntechnology = args.technology\\r\\n\\r\\nblock_workbook = None\\r\\nblock_link = None\\r\\nblock_section = None\\r\\nblock_query = None\\r\\nblock_text = None\\r\\n\\r\\nquery_size = 4 # 0: medium, 1: small, 4: tiny\\r\\n\\r\\n# Workbook building blocks\\r\\ndef load_building_blocks():\\r\\n\\r\\n # Define the blocks as global variables\\r\\n global block_workbook\\r\\n global block_link\\r\\n global block_section\\r\\n global block_query\\r\\n global block_text\\r\\n\\r\\n # Set folder where to load from\\r\\n if args.blocks_path:\\r\\n blocks_path = args.blocks_path\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Setting building block folder to {0}\\\".format(blocks_path))\\r\\n else:\\r\\n print(\\\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\\\")\\r\\n sys.exit(1)\\r\\n\\r\\n # Load initial workbook building block\\r\\n block_file = os.path.join(blocks_path, 'block_workbook.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_workbook = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load link building block\\r\\n block_file = os.path.join(blocks_path, 'block_link.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading 
file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_link = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load itemgroup (aka section) building block\\r\\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_section = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load query building block\\r\\n block_file = os.path.join(blocks_path, 'block_query.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_query = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n # Load text building block\\r\\n block_file = os.path.join(blocks_path, 'block_text.json')\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Loading file {0}...\\\".format(block_file))\\r\\n try:\\r\\n with open(block_file) as f:\\r\\n block_text = json.load(f)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening JSON workbook building block\\\", block_file, \\\"-\\\", str(e))\\r\\n sys.exit(0)\\r\\n\\r\\n# Main function to generate the workbook JSON\\r\\ndef generate_workbook(output_file, checklist_data):\\r\\n\\r\\n # Initialize an empty workbook\\r\\n workbook = block_workbook\\r\\n\\r\\n # Generate one tab in the workbook for each category\\r\\n category_id = 0\\r\\n query_id = 0\\r\\n category_dict = {}\\r\\n for item in checklist_data.get(\\\"categories\\\"):\\r\\n category_title = item.get(\\\"name\\\")\\r\\n category_id += 1\\r\\n category_dict[category_title] = category_id + 1 # We will use this dict later to know where to put each query\\r\\n # Create new link\\r\\n new_link = block_link.copy()\\r\\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\\r\\n new_link['linkLabel'] = category_title\\r\\n new_link['subTarget'] = 'category' + str(category_id)\\r\\n new_link['preText'] = category_title\\r\\n # Create new section\\r\\n new_section = block_section.copy()\\r\\n new_section['name'] = 'category' + str(category_id)\\r\\n new_section['conditionalVisibility']['value'] = 'category' + str(category_id)\\r\",\n \"type\": \"random\"\n }\n]"},"gt":{"kind":"list like","value":["load_building_blocks()\r"," output_file = get_output_file(checklist_url, is_file=False)\r"," generate_workbook(output_file, checklist_data)\r"," output_file = get_output_file(checklist_file, is_file=True)\r"," for item in checklist_data.get(\"categories\"):\r"," category_title = item.get(\"name\")\r"," checklist_data = json.load(f)\r"," response = requests.get(checklist_url)\r"," for item in checklist_data.get(\"items\"):\r"," guid = item.get(\"guid\")\r"," category = item.get(\"category\")\r"," subcategory = item.get(\"subcategory\")\r"," text = item.get(\"text\")\r"," description = item.get(\"description\")\r"," severity = item.get(\"severity\")\r"," link = item.get(\"link\")\r"," training = item.get(\"training\")\r"," graph_query = item.get(\"graph\")\r"," block_workbook = json.load(f)\r"," block_link = json.load(f)\r"," block_section = json.load(f)\r"," block_query = 
azure__review-checklists METASEP\n\nweb/flaskmysql/app.py METASEP\n#app.py\nfrom flask import Flask, request, render_template, jsonify\nfrom flaskext.mysql import MySQL #pip install flask-mysql\nimport pymysql\nimport os\nimport sys # needed for the sys.exit() calls below\n \napp = Flask(__name__)\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\n\n# Open connection\nmysql = MySQL()\napp.config['MYSQL_DATABASE_USER'] = 
mysql_server_username\napp.config['MYSQL_DATABASE_PASSWORD'] = mysql_server_password\napp.config['MYSQL_DATABASE_DB'] = 'checklist'\napp.config['MYSQL_DATABASE_HOST'] = mysql_server_fqdn\nmysql.init_app(app)\n \n@app.route('/')\ndef home():\n app.logger.info(\"DEBUG: Connecting to database...\") \n try:\n category_filter = request.args.get('category', None)\n status_filter = request.args.get('status', None)\n severity_filter = request.args.get('severity', None)\n except Exception as e:\n app.logger.info(\"ERROR reading query parameters for filters: {0}\".format(str(e)))\n pass\n try:\n conn = mysql.connect()\n cursor = conn.cursor(pymysql.cursors.DictCursor)\n except Exception as e:\n app.logger.info(\"ERROR opening cursor to DB connection: {0}\".format(str(e)))\n return jsonify(str(e))\n try:\n sqlquery = \"SELECT * from items\"\n filter1_added = False\n # category filter\n if category_filter:\n if filter1_added:\n sqlquery += \" AND \"\n else:\n sqlquery += \" WHERE \"\n filter1_added = True\n sqlquery += \"category = '{0}'\".format(category_filter)\n # status filter\n if status_filter:\n if filter1_added:\n sqlquery += \" AND \"\n else:\n sqlquery += \" WHERE \"\n filter1_added = True\n sqlquery += \"status = '{0}'\".format(status_filter)\n # severity filter\n if severity_filter:\n if filter1_added:\n sqlquery += \" AND \"\n else:\n sqlquery += \" WHERE \"\n filter1_added = True\n sqlquery += \"severity = '{0}'\".format(severity_filter)\n # send queries\n app.logger.info (\"Retrieving checklist items with query '{0}'\".format(sqlquery))\n cursor.execute(sqlquery)\n itemslist = cursor.fetchall()\n cursor.execute(\"SELECT DISTINCT category FROM items\")\n categorylist = cursor.fetchall()\n cursor.execute(\"SELECT DISTINCT severity FROM items\")\n severitylist = cursor.fetchall()\n cursor.execute(\"SELECT DISTINCT status FROM items\")\n statuslist = cursor.fetchall()\n return render_template('index.html', itemslist=itemslist, categorylist=categorylist, severitylist=severitylist, statuslist=statuslist)\n except Exception as e:\n app.logger.info(\"ERROR sending query: {0}\".format(str(e)))\n return jsonify(str(e))\n \n@app.route(\"/update\",methods=[\"POST\",\"GET\"])\ndef update():\n app.logger.info(\"Processing {0} with request.form {1}\".format(str(request.method), str(request.form))) \n try:\n conn = mysql.connect()\n cursor = conn.cursor(pymysql.cursors.DictCursor)\n if request.method == 'POST':\n field = request.form['field'] \n value = request.form['value']\n editid = request.form['id']\n app.logger.info(\"Processing POST for field '{0}', editid '{1}' and value '{2}'\".format(field, value, editid)) \n \n if field == 'comment' and value != '':\n sql = \"UPDATE items SET comments=%s WHERE guid=%s\"\n data = (value, editid)\n conn = mysql.connect()\n cursor = conn.cursor()\n app.logger.info (\"Sending SQL query '{0}' with data '{1}'\".format(sql, str(data)))\n cursor.execute(sql, data)\n conn.commit()\n elif field == 'status' and value != '':\n sql = \"UPDATE items SET status=%s WHERE guid=%s\"\n data = (value, editid)\n conn = mysql.connect()\n cursor = conn.cursor()\n app.logger.info (\"Sending SQL query '{0}' with data '{1}'\".format(sql, str(data)))\n cursor.execute(sql, data)\n conn.commit()\n else:\n app.logger.info (\"Field is '{0}', value is '{1}': not doing anything\".format(field, value))\n success = 1\n return jsonify(success)\n except Exception as e:\n app.logger.info(\"Oh oh, there is an error: {0}\".format(str(e)))\n success = 0\n return jsonify(success)\n finally:\n 
cursor.close() \n conn.close()\n \nif __name__ == \"__main__\":\n app.run(host='0.0.0.0', debug=True)\n\n\nweb/fillgraphdb/graph_db.py METASEP\nimport os\nimport sys\nimport pymysql\nimport json\nimport time\nimport requests\nimport azure.mgmt.resourcegraph as arg\nfrom datetime import datetime\nfrom azure.mgmt.resource import SubscriptionClient\nfrom azure.identity import AzureCliCredential\nfrom azure.identity import DefaultAzureCredential\nfrom azure.identity import ClientSecretCredential\n\n# Database and table name\nmysql_db_name = \"checklist\"\nmysql_db_table = \"items\"\nuse_ssl = \"yes\"\n\n# Format a string to be included in a SQL query as value\ndef escape_quotes (this_value):\n return str(this_value).replace(\"'\", \"\\\\'\")\n\n# Function to send an Azure Resource Graph query\ndef get_resources (graph_query, argClient, subsList, argQueryOptions):\n # TO DO: Authentication should probably happen outside of this function\n try:\n # Create query\n argQuery = arg.models.QueryRequest(subscriptions=subsList, query=graph_query, options=argQueryOptions)\n # Run query and return results\n argResults = argClient.resources(argQuery)\n print(\"DEBUG: query results: {0}\".format(str(argResults)))\n return argResults\n except Exception as e:\n print(\"ERROR: Error sending Azure Resource Graph query to Azure: {0}\".format(str(e)))\n # sys.exit(0) # Debugging.... Probably this should be exit(1)\n return ''\n\n# Wait for IMDS endpoint to be available\ntry:\n wait_max_intervals = int(os.environ.get(\"WAIT_INTERVALS\"))\n print (\"DEBUG: WAIT_INTERVALS read from environment variable: {0}\".format(str(wait_max_intervals)))\nexcept:\n wait_max_intervals = 5\n print (\"DEBUG: WAIT_INTERVALS set to default value: {0}\".format(str(wait_max_intervals)))\nwait_interval = 10.0\nimds_url = 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/'\nimds_headers = {\n \"Metadata\" : \"true\"\n}\nimds_tries = 0\nbreak_loop = False\nprint ('DEBUG: Going into waiting loop to make sure the metadata endpoint is active...')\nwhile not break_loop:\n imds_tries += 1\n print (\"DEBUG: We are in the loop, pass {0}/{1} ({2}). 
Trying the IMDS endpoint...\".format(str(imds_tries), str(wait_max_intervals), str(datetime.now())))\n if imds_tries > wait_max_intervals:\n print(\"ERROR: max wait intervals exceeded when waiting for IMDS to answer, hopefully you specified some SP credentials as SP variables...\")\n break_loop = True\n else:\n print (\"DEBUG: Sending GET request to {0}...\".format(imds_url))\n try:\n imds_response = requests.get(imds_url, headers=imds_headers, timeout=1)\n if imds_response.status_code >= 200 and imds_response.status_code <= 299:\n print (\"DEBUG: IMDS endpoint seems to be working, received status code {0} and answer {1}\".format(str(imds_response.status_code), str(imds_response.text)))\n break_loop = True\n else:\n print (\"DEBUG: IMDS endpoint doesnt seem to be working, received status code {0} and answer {1}\".format(str(imds_response.status_code), str(imds_response.text)))\n except Exception as e:\n print(\"DEBUG: Error sending request to IMDS endpoint: {0}\".format(str(e)))\n pass\n if not break_loop:\n print(\"DEBUG: Going to sleep {0} seconds before next try...\".format(str(wait_interval)))\n time.sleep (wait_interval)\n\n# Authenticate to Azure, either with Managed Identity or SP\nprint('DEBUG: Authenticating to Azure...')\ntry:\n print('DEBUG: Getting environment variables...')\n # credential = AzureCliCredential() # Get your credentials from Azure CLI (development only!) and get your subscription list\n tenant_id = os.environ.get(\"AZURE_TENANT_ID\")\n client_id = os.environ.get(\"AZURE_CLIENT_ID\")\n client_secret = os.environ.get(\"AZURE_CLIENT_SECRET\")\nexcept Exception as e:\n print(\"ERROR: Error getting environment variables: {0}\".format(str(e)))\n tenant_id = None\n client_id = None\n client_secret = None\n pass \ntry:\n if tenant_id and client_id and client_secret:\n print(\"DEBUG: Service principal credentials (client ID {0}, tenant ID {1}) retrieved from environment variables, trying SP-based authentication now...\".format(str(client_id), str(tenant_id)))\n credential = ClientSecretCredential(tenant_id=tenant_id, client_id=client_id, client_secret=client_secret)\n else:\n print('DEBUG: Service principal credentials could not be retrieved from environment variables, trying default authentication method with Managed Identity...')\n credential = DefaultAzureCredential() # Managed identity\nexcept Exception as e:\n print(\"ERROR: Error during Azure Authentication: {0}\".format(str(e)))\n sys.exit(1)\ntry:\n print('DEBUG: Getting subscriptions...')\n subsClient = SubscriptionClient(credential)\n subsRaw = []\n for sub in subsClient.subscriptions.list():\n subsRaw.append(sub.as_dict())\n subsList = []\n for sub in subsRaw:\n subsList.append(sub.get('subscription_id'))\n print (\"DEBUG: provided credentials give access to {0} subscription(s)\".format(str(len(subsList))))\n # Create Azure Resource Graph client and set options\n print('DEBUG: Creating client object...')\n argClient = arg.ResourceGraphClient(credential)\n argQueryOptions = arg.models.QueryRequestOptions(result_format=\"objectArray\")\nexcept Exception as e:\n print(\"ERROR: Error creating resource graph client object: {0}\".format(str(e)))\n sys.exit(1)\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nelse:\n print(\"DEBUG: mysql FQDN retrieved from environment variables: 
'{0}'\".format(mysql_server_fqdn))\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nelse:\n print(\"DEBUG: mysql authentication username retrieved from environment variables: '{0}'\".format(mysql_server_username))\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\nelse:\n print(\"DEBUG: mysql authentication password retrieved from environment variables: {0}\".format(\"********\"))\n\n# Create connection to MySQL server and number of records\nprint (\"DEBUG: Connecting to '{0}' with username '{1}'...\".format(mysql_server_fqdn, mysql_server_username))\nif use_ssl == 'yes':\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password, ssl = {'ssl':{'ca': 'BaltimoreCyberTrustRoot.crt.pem'}})\nelse:\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password)\nsql_query = \"SELECT * FROM {0} WHERE graph_query_success IS NOT null AND graph_query_failure IS NOT null AND graph_query_success != 'None' AND graph_query_failure != 'None';\".format (mysql_db_table)\ncursor = db.cursor()\ncursor.execute(sql_query)\nrows = cursor.fetchall()\nrow_cnt = 0\nif len(rows) > 0:\n for row in rows:\n row_cnt += 1\n result_text = ''\n item_guid = row[0]\n item_success_query = row[10]\n item_failure_query = row[11]\n # print (\"DEBUG {0}: '{1}', '{2}'\".format(item_guid, item_success_query, item_failure_query))\n success_resources = str(get_resources(item_success_query, argClient, subsList, argQueryOptions)).replace(\"'\", '\"')\n success_resources = success_resources.replace(': None', ': \"None\"')\n # print (\"DEBUG: SUCCESS QUERY: {0}\".format(success_resources))\n if success_resources:\n try:\n success_resources_object = json.loads(success_resources)\n except:\n print(\"ERROR: JSON returned from Azure Graph Query not valid: {0}\".format(success_resources))\n for resource in success_resources_object['data']:\n if result_text: result_text += '\\n'\n result_text += \"SUCCESS: {0}\".format(resource[\"id\"])\n failure_resources = str(get_resources(item_failure_query, argClient, subsList, argQueryOptions)).replace(\"'\", '\"')\n failure_resources = failure_resources.replace(': None', ': \"None\"')\n # print (\"DEBUG: FAILURE QUERY: {0}\".format(failure_resources))\n if failure_resources:\n try:\n failure_resources_object = json.loads(failure_resources)\n except:\n print(\"ERROR: JSON returned from Azure Graph Query not valid: {0}\".format(failure_resources))\n for resource in failure_resources_object['data']:\n if result_text: result_text += '\\n'\n result_text += \"FAILURE: {0}\".format(resource[\"id\"])\n # print (\"DEBUG: Result summary: \\n{0}\".format(result_text))\n if result_text:\n update_query = \"UPDATE items SET graph_query_result = '{0}' WHERE guid = '{1}';\".format(result_text, item_guid)\n print (\"DEBUG: sending SQL query '{0}'\".format(update_query))\n try:\n cursor.execute(update_query)\n db.commit()\n except Exception as e:\n print(\"ERROR: Error sending SQL query to MySql 
server: {0}\".format(str(e)))\n pass\n else:\n print(\"DEBUG: No results could be retrieved for the success and failure queries of checklist item {0}\".format(item_guid))\nelse:\n row_count = 0\nprint (\"INFO: Processed table {0} in database {1} with {2} records with graph queries. Happy review!\".format(mysql_db_table, mysql_db_name, str(row_cnt)))\n\n# Bye\ndb.close()\nweb/filldb/fill_db.py METASEP\nimport requests\nimport json\nimport os\nimport sys\nimport pymysql\n\n# Database and table name\nmysql_db_name = \"checklist\"\nmysql_db_table = \"items\"\nuse_ssl = \"yes\"\n\n# Format a string to be included in a SQL query as value\ndef escape_quotes(this_value):\n return str(this_value).replace(\"'\", \"\\\\'\")\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"ERROR: Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\n\n# Create connection to MySQL server and get version\nprint (\"INFO: Connecting to {0} with username {1}...\".format(mysql_server_fqdn, mysql_server_username))\nif use_ssl == 'yes':\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, passwd = mysql_server_password, ssl = {'ssl':{'ca': 'BaltimoreCyberTrustRoot.crt.pem'}})\nelse:\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, passwd = mysql_server_password)\nsql_query = \"SELECT VERSION();\"\ncursor = db.cursor()\ncursor.execute(sql_query)\nrows = cursor.fetchall()\ndata = \"\"\nif len(rows) > 0:\n for row in rows:\n if len(data) > 0:\n data += ', '\n data += str(''.join(row))\nprint (\"INFO: Connected to MySQL server {0} with version {1}\".format(mysql_server_fqdn, data))\n\n# Delete db if existed\nsql_query = \"DROP DATABASE IF EXISTS {0};\".format(mysql_db_name)\n# print (\"Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\n\n# Create database\nsql_query = \"CREATE DATABASE IF NOT EXISTS {0};\".format(mysql_db_name)\n# print (\"Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\nsql_query = \"USE {0}\".format(mysql_db_name)\n# print (\"Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\n\n# Create table\nsql_query = \"\"\"CREATE TABLE {0} (\n guid varchar(40),\n text varchar(1024),\n description varchar(1024),\n link varchar(255),\n training varchar(255),\n comments varchar(1024),\n severity varchar(10),\n status varchar(15),\n category varchar(255),\n subcategory varchar(255),\n graph_query_success varchar(1024),\n graph_query_failure varchar(1024),\n graph_query_result varchar(4096)\n);\"\"\".format(mysql_db_table)\n# print (\"DEBUG: Sending query: {0}\".format(sql_query))\ncursor.execute(sql_query)\ndb.commit()\n\n# Download checklist\ntechnology = os.environ.get(\"CHECKLIST_TECHNOLOGY\")\nif technology:\n checklist_url = 
\"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\nelse:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\nresponse = requests.get(checklist_url)\n\n# If download was successful\nif response.status_code == 200:\n print (\"INFO: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_object = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n # Get default status from the JSON, default to \"Not verified\"\n try:\n status_list = checklist_object.get(\"status\")\n default_status = status_list[0].get(\"name\")\n except:\n default_status = \"Not verified\"\n pass\n # For each checklist item, add a row to mysql DB\n row_counter = 0\n for item in checklist_object.get(\"items\"):\n guid = item.get(\"guid\")\n category = item.get(\"category\")\n subcategory = item.get(\"subcategory\")\n text = escape_quotes(item.get(\"text\"))\n description = escape_quotes(item.get(\"description\"))\n severity = item.get(\"severity\")\n link = item.get(\"link\")\n training = item.get(\"training\")\n status = default_status\n graph_query_success = escape_quotes(item.get(\"graph_success\"))\n graph_query_failure = escape_quotes(item.get(\"graph_failure\"))\n # print(\"DEBUG: Adding to table {0}: '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', '{8}', '{9}', '{10}'\".format(mysql_db_table, category, subcategory, text, description, severity, link, training, graph_query_success, graph_query_failure, guid))\n sql_query = \"\"\"INSERT INTO {0} (category,subcategory,text,description,severity,link,training,graph_query_success,graph_query_failure,guid,status) \n VALUES ('{1}','{2}','{3}','{4}','{5}', '{6}','{7}','{8}','{9}','{10}', '{11}');\"\"\".format(mysql_db_table, category, subcategory, text, description, severity, link, training, graph_query_success, graph_query_failure, guid, status)\n # print (\"DEBUG: Sending query: {0}\".format(sql_query))\n cursor.execute(sql_query)\n db.commit()\n row_counter += 1\nelse:\n print (\"Error downloading {0}\".format(checklist_url))\n\n# Bye\nprint(\"INFO: {0} rows added to database.\".format(str(row_counter)))\ndb.close()\nweb/filldb/check_db.py METASEP\nimport os\nimport sys\nimport pymysql\n\n# Database and table name\nmysql_db_name = \"checklist\"\nmysql_db_table = \"items\"\nuse_ssl = \"yes\"\n\n# Format a string to be included in a SQL query as value\ndef escape_quotes(this_value):\n return str(this_value).replace(\"'\", \"\\\\'\")\n\n# Get database credentials from environment variables\nmysql_server_fqdn = os.environ.get(\"MYSQL_FQDN\")\nif mysql_server_fqdn == None:\n print(\"Please define an environment variable 'MYSQL_FQDN' with the FQDN of the MySQL server\")\n sys.exit(1)\nmysql_server_name = mysql_server_fqdn.split('.')[0]\nmysql_server_username = os.environ.get(\"MYSQL_USER\")\nif mysql_server_username == None:\n print(\"Please define an environment variable 'MYSQL_USER' with the FQDN of the MySQL username\")\n sys.exit(1)\nif not mysql_server_username.__contains__('@'):\n mysql_server_username += '@' + mysql_server_name\nmysql_server_password = os.environ.get(\"MYSQL_PASSWORD\")\nif mysql_server_password == None:\n print(\"Please define an environment variable 'MYSQL_PASSWORD' with the FQDN of the MySQL password\")\n sys.exit(1)\n\n# Create connection to MySQL server and number of records\nprint 
(\"Connecting to {0} with username {1}...\".format(mysql_server_fqdn, mysql_server_username))\nif use_ssl == 'yes':\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password, ssl = {'ssl':{'ca': 'BaltimoreCyberTrustRoot.crt.pem'}})\nelse:\n db = pymysql.connect(host=mysql_server_fqdn, user = mysql_server_username, database = mysql_db_name, passwd = mysql_server_password)\nsql_query = \"SELECT COUNT(*) FROM {0};\".format (mysql_db_table)\ncursor = db.cursor()\ncursor.execute(sql_query)\nrows = cursor.fetchall()\nif len(rows) > 0:\n row_count = rows[0][0]\nelse:\n row_count = 0\nprint (\"Table {0} in database {1} contains {2} records\".format(mysql_db_table, mysql_db_name, str(row_count)))\n\n# Bye\ndb.close()\nscripts/workbook_create.py METASEP\n######################################################################\n#\n# This script reads the checklist items from the latest checklist file\n# in Github (or from a local file) and generates an Azure Monitor\n# workbook in JSON format.\n# \n# Last updated: February 2023\n#\n######################################################################\n\nimport json\nimport argparse\nimport sys\nimport os\nimport requests\nimport glob\nimport uuid\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Generate Azure Monitor workbook from Azure Review Checklist')\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. Otherwise it will take the latest file from Github')\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\nparser.add_argument('--technology', dest='technology', action='store',\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\nparser.add_argument('--output-file', dest='output_file', action='store',\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\nparser.add_argument('--output-path', dest='output_path', action='store',\n help='Folder where to store the results (using the same name as the input_file)')\nparser.add_argument('--blocks-path', dest='blocks_path', action='store',\n help='Folder where the building blocks to build the workbook are stored)')\nparser.add_argument('--create-arm-template', dest='create_arm_template', action='store_true',\n default=False,\n help='create an ARM template, additionally to the workbook JSON (default: False)')\nparser.add_argument('--category', dest='category', action='store',\n help='if the workbook should be restricted to a category containing the specified string')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\nchecklist_file = args.checklist_file\ntechnology = args.technology\n\nblock_workbook = None\nblock_link = None\nblock_section = None\nblock_query = None\nblock_text = None\n\nquery_size = 4 # 0: 
medium, 1: small, 4: tiny\n\n# Workbook building blocks\ndef load_building_blocks():\n\n # Define the blocks as global variables\n global block_workbook\n global block_link\n global block_section\n global block_query\n global block_text\n global block_arm\n\n # Set folder where to load from\n if args.blocks_path:\n blocks_path = args.blocks_path\n if args.verbose:\n print (\"DEBUG: Setting building block folder to {0}\".format(blocks_path))\n else:\n print(\"ERROR: please use the argument --blocks-path to specify the location of the workbook building blocks.\")\n sys.exit(1)\n\n # Load initial workbook building block\n block_file = os.path.join(blocks_path, 'block_workbook.json')\n if args.verbose:\n print (\"DEBUG: Loading file {0}...\".format(block_file))\n try:\n with open(block_file) as f:\n block_workbook = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\n sys.exit(0)\n # Load link building block\n block_file = os.path.join(blocks_path, 'block_link.json')\n if args.verbose:\n print (\"DEBUG: Loading file {0}...\".format(block_file))\n try:\n with open(block_file) as f:\n block_link = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\n sys.exit(0)\n # Load itemgroup (aka section) building block\n block_file = os.path.join(blocks_path, 'block_itemgroup.json')\n if args.verbose:\n print (\"DEBUG: Loading file {0}...\".format(block_file))\n try:\n with open(block_file) as f:\n block_section = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\n sys.exit(0)\n # Load query building block\n block_file = os.path.join(blocks_path, 'block_query.json')\n if args.verbose:\n print (\"DEBUG: Loading file {0}...\".format(block_file))\n try:\n with open(block_file) as f:\n block_query = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\n sys.exit(0)\n # Load text building block\n block_file = os.path.join(blocks_path, 'block_text.json')\n if args.verbose:\n print (\"DEBUG: Loading file {0}...\".format(block_file))\n try:\n with open(block_file) as f:\n block_text = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when opening JSON workbook building block\", block_file, \"-\", str(e))\n sys.exit(0)\n # Load ARM template building block\n block_file = os.path.join(blocks_path, 'block_arm.json')\n if args.verbose:\n print (\"DEBUG: Loading file {0}...\".format(block_file))\n try:\n with open(block_file) as f:\n block_arm = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when opening JSON ARM template building block\", block_file, \"-\", str(e))\n sys.exit(0)\n\n# Function that corrects format issues in the queries stored in JSON\ndef fix_query_format(query_string):\n if query_string:\n query_string = str(query_string).replace('\\\\\\\\', '\\\\') # Replace a double escaping inverted bar ('\\\\\\\\') through a single one ('\\')\n return query_string\n else:\n return None\n\n# Function that transforms a JSON string to be included in an ARM template\ndef serialize_data(workbook_string):\n if workbook_string:\n # Escape double quotes\n workbook_string = str(workbook_string).replace('\"', '\\\"')\n # Escape escape characters\n # workbook_string = str(workbook_string).replace('\\\\', '\\\\\\\\')\n # Undo the scaping for the newline character (otherwise the 
markdown in the workbook would look wrong).\n # Note that this might impact newline characters in queries!\n # workbook_string = str(workbook_string).replace('\\\\\\\\n', '\\\\n')\n return workbook_string\n else:\n return None\n\n# Main function to generate the workbook JSON\ndef generate_workbook(output_file, checklist_data):\n\n # Initialize an empty workbook\n workbook = json.loads(json.dumps(block_workbook))\n workbook_title = \"## \" + checklist_data['metadata']['name']\n if args.category:\n workbook_title += ' - ' + args.category[0].upper() + args.category[1:]\n workbook_title += \"\\n---\\n\\nThis workbook has been automatically generated out of the checklists in the [Azure Review Checklists repo](https://github.com/Azure/review-checklists).\"\n workbook['items'][0]['content']['json'] = workbook_title\n\n # Decide whether we will match in the category, or subcategory, and update the corresponding variables\n if args.category:\n if args.verbose:\n print(\"DEBUG: creating tab list with subcategories list for categories containing the term {0}...\".format(args.category))\n tab_name_field = 'subcategory'\n tab_title_list = [x[\"subcategory\"] for x in checklist_data.get(\"items\") if (args.category.lower() in str(x[\"category\"]).lower())]\n tab_title_list = list(set(tab_title_list))\n else:\n if args.verbose:\n print(\"DEBUG: creating tab list with categories...\")\n tab_name_field = 'category'\n tab_title_list = [x[\"name\"] for x in checklist_data.get(\"categories\")]\n if args.verbose:\n print(\"DEBUG: created tab list: {0}\".format(str(tab_title_list)))\n\n # Generate one tab in the workbook for each category/subcategory\n tab_id = 0\n query_id = 0\n tab_dict = {}\n \n for tab_title in tab_title_list:\n tab_dict[tab_title] = tab_id # We will use this dict later to know where to put each query\n tab_id += 1\n # Create new link\n new_link = block_link.copy()\n new_link['id'] = str(uuid.uuid4()) # RANDOM GUID\n new_link['linkLabel'] = tab_title\n new_link['subTarget'] = 'category' + str(tab_id)\n new_link['preText'] = tab_title\n # Create new section\n new_section = block_section.copy()\n new_section['name'] = 'category' + str(tab_id)\n new_section['conditionalVisibility']['value'] = 'category' + str(tab_id)\n new_section['content']['items'][0]['content']['json'] = \"## \" + tab_title\n new_section['content']['items'][0]['name'] = 'category' + str(tab_id) + 'title'\n # Add link and query to workbook\n # if args.verbose:\n # print()\n # print (\"DEBUG: Adding link: {0}\".format(json.dumps(new_link)))\n # print (\"DEBUG: Adding section: {0}\".format(json.dumps(new_section)))\n # print(\"DEBUG: Workbook so far: {0}\".format(json.dumps(workbook)))\n workbook['items'][2]['content']['links'].append(new_link.copy()) # I am getting crazy with Python variable references :(\n # Add section to workbook\n new_new_section=json.loads(json.dumps(new_section.copy()))\n workbook['items'].append(new_new_section)\n\n if args.verbose:\n print(\"DEBUG: category dictionary generated: {0}\".format(str(tab_dict)))\n\n # For each checklist item, add a query to the workbook\n for item in checklist_data.get(\"items\"):\n # Read variables from JSON\n guid = item.get(\"guid\")\n tab = item.get(tab_name_field)\n text = item.get(\"text\")\n description = item.get(\"description\")\n severity = item.get(\"severity\")\n link = item.get(\"link\")\n training = item.get(\"training\")\n graph_query = fix_query_format(item.get(\"graph\"))\n if graph_query and (tab in tab_title_list):\n if args.verbose:\n print(\"DEBUG: 
adding sections to workbook for ARG query '{0}', length of query is {1}\".format(str(graph_query), str(len(str(graph_query)))))\n query_id += 1\n # Create new text\n new_text = block_text.copy()\n new_text['name'] = 'querytext' + str(query_id)\n new_text['content']['json'] = text\n if link:\n new_text['content']['json'] += \". Check [this link](\" + link + \") for further information.\"\n if training:\n new_text['content']['json'] += \". [This training](\" + training + \") can help to educate yourself on this.\"\n # Create new query\n new_query = block_query.copy()\n new_query['name'] = 'query' + str(query_id)\n new_query['content']['query'] = graph_query\n new_query['content']['size'] = query_size\n # Add text and query to the workbook\n tab_id = tab_dict[tab] + len(block_workbook['items'])\n if args.verbose:\n print (\"DEBUG: Adding text and query to tab ID {0} ({1})\".format(str(tab_id), tab))\n print (\"DEBUG: Workbook object name is {0}\".format(workbook['items'][tab_id]['name']))\n new_new_text = json.loads(json.dumps(new_text.copy()))\n new_new_query = json.loads(json.dumps(new_query.copy()))\n workbook['items'][tab_id]['content']['items'].append(new_new_text)\n workbook['items'][tab_id]['content']['items'].append(new_new_query)\n\n # Dump the workbook to the output file or to console, if there was any query in the original checklist\n if query_id > 0:\n if output_file:\n # Dump workbook JSON into a file\n workbook_string = json.dumps(workbook, indent=4)\n with open(output_file, 'w', encoding='utf-8') as f:\n f.write(workbook_string)\n f.close()\n # Create ARM template (optionally, if specified in the parameters)\n if args.create_arm_template:\n arm_output_file = os.path.splitext(output_file)[0] + '_template.json'\n if args.verbose:\n print (\"DEBUG: Creating ARM template in file {0}...\".format(arm_output_file))\n block_arm['parameters']['workbookDisplayName']['defaultValue'] = checklist_data['metadata']['name']\n if args.category:\n block_arm['parameters']['workbookDisplayName']['defaultValue'] += ' - ' + args.category[0].upper() + args.category[1:]\n block_arm['resources'][0]['properties']['serializedData'] = serialize_data(workbook_string)\n arm_string = json.dumps(block_arm, indent=4)\n with open(arm_output_file, 'w', encoding='utf-8') as f:\n f.write(arm_string)\n f.close()\n else:\n print(workbook_string)\n else:\n print(\"INFO: sorry, the analyzed checklist did not contain any graph query\")\n\ndef get_output_file(checklist_file_or_url, is_file=True):\n if is_file:\n output_file = os.path.basename(checklist_file_or_url)\n else:\n output_file = checklist_file_or_url.split('/')[-1]\n if args.output_file:\n return args.output_file\n elif args.output_path:\n # Get filename without path and extension\n output_file = os.path.join(args.output_path, output_file)\n # If category specified, add to output file name\n if args.category:\n return os.path.splitext(output_file)[0] + '_' + str(args.category).lower() + '_workbook.json'\n else:\n return os.path.splitext(output_file)[0] + '_workbook.json'\n else:\n output_file = None\n\n\n########\n# Main #\n########\n\n# First thing of all, load the building blocks\nload_building_blocks()\nif args.verbose:\n print (\"DEBUG: building blocks variables intialized:\")\n print (\"DEBUG: - Workbook: {0}\".format(str(block_workbook)))\n print (\"DEBUG: - Number of items: {0}\".format(str(len(block_workbook['items']))))\n print (\"DEBUG: - Link: {0}\".format(str(block_link)))\n print (\"DEBUG: - Query: {0}\".format(str(block_query)))\n\n# Download 
checklist or process from local file\nif checklist_file:\n checklist_file_list = checklist_file.split(\" \")\n # Take only the English versions of the checklists (JSON files)\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list if (os.path.splitext(file)[1] == '.json')]\n # Remove duplicates\n checklist_file_list = list(set(checklist_file_list))\n # Go over the list(s)\n for checklist_file in checklist_file_list:\n if args.verbose:\n print(\"DEBUG: Opening checklist file\", checklist_file)\n # Get JSON\n try:\n # Open file\n with open(checklist_file) as f:\n checklist_data = json.load(f)\n # Set output file variable\n output_file = get_output_file(checklist_file, is_file=True)\n # Generate workbook\n generate_workbook(output_file, checklist_data)\n # If error, just continue\n except Exception as e:\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\n # sys.exit(0)\nelse:\n # If no input files specified, fetch the latest from Github...\n if technology:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\n else:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\n if args.verbose:\n print(\"DEBUG: Downloading checklist file from\", checklist_url)\n response = requests.get(checklist_url)\n # If download was successful\n if response.status_code == 200:\n if args.verbose:\n print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_data = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n # Set output files\n output_file = get_output_file(checklist_url, is_file=False)\n # Generate workbook\n generate_workbook(output_file, checklist_data)\n\n\nscripts/update_excel_openpyxl.py METASEP\n######################################################################\n#\n# This script reads the checklist items from the latest checklist file\n# in Github (or from a local file) and populates an Excel spreadsheet\n# with the contents.\n# \n# Last updated: March 2022\n#\n######################################################################\n\nimport json\nimport argparse\nimport sys\nimport os\nimport requests\nimport glob\nfrom openpyxl import load_workbook\nfrom openpyxl.worksheet.datavalidation import DataValidation\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\nparser.add_argument('--checklist-file', dest='checklist_file', action='store',\n help='You can optionally supply a JSON file containing the checklist you want to dump to the Excel spreadsheet. 
Otherwise it will take the latest file from Github')\nparser.add_argument('--only-english', dest='only_english', action='store_true', default=False,\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\nparser.add_argument('--find-all', dest='find_all', action='store_true', default=False,\n help='if checklist files are specified, find all the languages for the given checklists (default: False)')\nparser.add_argument('--technology', dest='technology', action='store',\n help='If you do not supply a JSON file with the checklist, you need to specify the technology from which the latest checklist will be downloaded from Github')\nparser.add_argument('--excel-file', dest='excel_file', action='store',\n help='You need to supply an Excel file where the checklist will be written')\nparser.add_argument('--output-excel-file', dest='output_excel_file', action='store',\n help='You can optionally supply an Excel file where the checklist will be saved, otherwise it will be updated in-place')\nparser.add_argument('--output-path', dest='output_path', action='store',\n help='If using --output-name-is-input-name, folder where to store the results')\nparser.add_argument('--output-name-is-input-name', dest='output_name_is_input_name', action='store_true',\n default=False,\n help='Save the output in a file with the same filename as the JSON input, but with xlsx extension')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\nchecklist_file = args.checklist_file\nexcel_file = args.excel_file\ntechnology = args.technology\n\n# Constants\nworksheet_checklist_name = 'Checklist'\nrow1 = 8 # First row after which the Excel spreadsheet will be updated\ncol_checklist_name = \"A\"\nrow_checklist_name = \"4\"\nguid_column_index = \"L\"\ncomment_column_index = \"G\"\nsample_cell_index = 'A4'\ncol_area = \"A\"\ncol_subarea = \"B\"\ncol_check = \"C\"\ncol_desc = \"D\"\ncol_sev = \"E\"\ncol_status = \"F\"\ncol_comment = \"G\"\ncol_link = \"H\"\ncol_training = \"I\"\ncol_arg_success = \"J\"\ncol_arg_failure = \"K\"\ncol_guid = \"L\"\ninfo_link_text = 'More info'\ntraining_link_text = 'Training'\nworksheet_values_name = 'Values'\nvalues_row1 = 2\ncol_values_severity = \"A\"\ncol_values_status = \"B\"\ncol_values_area = \"C\"\ncol_values_description = \"H\"\n\n# Main function\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\n # Load workbook\n try:\n wb = load_workbook(filename = input_excel_file)\n if args.verbose:\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\n except Exception as e:\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\n sys.exit(1)\n\n # Get worksheet\n try:\n ws = wb[worksheet_checklist_name]\n if args.verbose:\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\n except Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\n sys.exit(1)\n\n # Set checklist name\n try:\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\n if args.verbose:\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\n except Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\n 
sys.exit(1)\n\n # Get default status from the JSON, default to \"Not verified\"\n try:\n status_list = checklist_data.get(\"status\")\n default_status = status_list[0].get(\"name\")\n if args.verbose:\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\n except:\n default_status = \"Not verified\"\n if args.verbose:\n print (\"DEBUG: Using default status 'Not verified'\")\n pass\n\n # For each checklist item, add a row to spreadsheet\n row_counter = row1\n for item in checklist_data.get(\"items\"):\n # Read variables from JSON\n guid = item.get(\"guid\")\n category = item.get(\"category\")\n subcategory = item.get(\"subcategory\")\n text = item.get(\"text\")\n description = item.get(\"description\")\n severity = item.get(\"severity\")\n link = item.get(\"link\")\n training = item.get(\"training\")\n status = default_status\n graph_query_success = item.get(\"graph_success\")\n graph_query_failure = item.get(\"graph_failure\")\n # Update Excel\n ws[col_area + str(row_counter)].value = category\n ws[col_subarea + str(row_counter)].value = subcategory\n ws[col_check + str(row_counter)].value = text\n ws[col_desc + str(row_counter)].value = description\n ws[col_sev + str(row_counter)].value = severity\n ws[col_status + str(row_counter)].value = status\n ws[col_link + str(row_counter)].value = link\n # if link != None:\n # link_elements = link.split('#')\n # link_address = link_elements[0]\n # if len(link_elements) > 1:\n # link_subaddress = link_elements[1]\n # else:\n # link_subaddress = \"\"\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\n ws[col_training + str(row_counter)].value = training\n # if training != None:\n # training_elements = training.split('#')\n # training_address = training_elements[0]\n # if len(training_elements) > 1:\n # training_subaddress = training_elements[1]\n # else:\n # training_subaddress = \"\"\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\n # GUID and ARG queries\n ws[col_arg_success + str(row_counter)].value = graph_query_success\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\n ws[col_guid + str(row_counter)].value = guid\n # Next row\n row_counter += 1\n\n # Display summary\n if args.verbose:\n number_of_checks = row_counter - row1\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\n\n # Get worksheet\n try:\n wsv = wb[worksheet_values_name]\n if args.verbose:\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\n except Exception as e:\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\n sys.exit(1)\n\n # Update categories\n row_counter = values_row1\n for item in checklist_data.get(\"categories\"):\n area = item.get(\"name\")\n wsv[col_values_area + str(row_counter)].value = area\n row_counter += 1\n\n # Display summary\n if args.verbose:\n print(\"DEBUG:\", str(row_counter - values_row1), \"categories addedd to Excel spreadsheet\")\n\n # Update status\n row_counter = values_row1\n for item in checklist_data.get(\"status\"):\n status = item.get(\"name\")\n description = item.get(\"description\")\n wsv[col_values_status + str(row_counter)].value = status\n wsv[col_values_description + str(row_counter)].value = description\n row_counter += 1\n\n # 
Display summary\n if args.verbose:\n print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\n\n # Update severities\n row_counter = values_row1\n for item in checklist_data.get(\"severities\"):\n severity = item.get(\"name\")\n wsv[col_values_severity + str(row_counter)].value = severity\n row_counter += 1\n\n # Display summary\n if args.verbose:\n print(\"DEBUG:\", str(row_counter - values_row1), \"severities addedd to Excel spreadsheet\")\n\n # Data validation\n # dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)\n dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True)\n rangevar = col_status + str(row1) +':' + col_status + str(row1 + number_of_checks)\n if args.verbose:\n print(\"DEBUG: adding data validation to range\", rangevar)\n dv.add(rangevar)\n ws.add_data_validation(dv)\n\n # Close book\n if args.verbose:\n print(\"DEBUG: saving workbook\", output_excel_file)\n try:\n wb.save(output_excel_file)\n except Exception as e:\n print(\"ERROR: Error when saving Excel file to\", output_excel_file, \"-\", str(e))\n sys.exit(1)\n\n########\n# Main #\n########\n\n# Download checklist\nif checklist_file:\n checklist_file_list = checklist_file.split(\" \")\n # If --only-english parameter was supplied, take only the English version and remove duplicates\n if args.only_english:\n checklist_file_list = [file[:-8] + '.en.json' for file in checklist_file_list]\n checklist_file_list = list(set(checklist_file_list))\n if args.verbose:\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\n # If --find-all paramater was supplied, find all the languages for the checklist\n if args.find_all:\n new_file_list = []\n for checklist_file in checklist_file_list:\n filedir = os.path.dirname(checklist_file)\n filebase = os.path.basename(checklist_file)\n filebase_noext = filebase[:-8] # Remove '.en.json'\n file_match_list = glob.glob(os.path.join(filedir, filebase_noext + '.*.json'))\n for checklist_match in file_match_list:\n # new_file_list.append(os.path.join(filedir, checklist_match))\n new_file_list.append(checklist_match)\n checklist_file_list = list(set(new_file_list))\n if args.verbose:\n print(\"DEBUG: new checklist file list:\", str(checklist_file_list))\n # Go over the list\n for checklist_file in checklist_file_list:\n if args.verbose:\n print(\"DEBUG: Opening checklist file\", checklist_file)\n # Get JSON\n try:\n with open(checklist_file) as f:\n checklist_data = json.load(f)\n except Exception as e:\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\n sys.exit(0)\n # Set input and output files\n input_excel_file = excel_file\n if args.output_excel_file:\n output_excel_file = args.output_excel_file\n elif args.output_name_is_input_name:\n if args.output_path:\n # Get filename without path and extension\n output_excel_file = os.path.splitext(os.path.basename(checklist_file))[0] + '.xlsx'\n output_excel_file = os.path.join(args.output_path, output_excel_file)\n else:\n # Just change the extension\n output_excel_file = os.path.splitext(checklist_file)[0] + '.xlsx'\n # Update spreadsheet\n update_excel_file(input_excel_file, output_excel_file, checklist_data)\nelse:\n if technology:\n checklist_url = \"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/\" + technology + \"_checklist.en.json\"\n else:\n checklist_url = 
\"https://raw.githubusercontent.com/Azure/review-checklists/main/checklists/lz_checklist.en.json\"\n if args.verbose:\n print(\"DEBUG: Downloading checklist file from\", checklist_url)\n response = requests.get(checklist_url)\n # If download was successful\n if response.status_code == 200:\n if args.verbose:\n print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\n try:\n # Deserialize JSON to object variable\n checklist_data = json.loads(response.text)\n except Exception as e:\n print(\"Error deserializing JSON content: {0}\".format(str(e)))\n sys.exit(1)\n # Upload spreadsheet\n if args.output_excel_file:\n output_excel_file = args.output_excel_file\n else:\n output_excel_file = excel_file\n update_excel_file(excel_file, output_excel_file, checklist_data)\n\n\nscripts/translate.py METASEP\nimport requests\nimport os\nimport argparse\nimport sys\nimport json\nimport uuid\n\n# Variables\ntranslate_keys = ('description', 'name', 'category', 'subcategory', 'text', 'severity')\ntranslate_languages = ['es', 'ja', 'pt', 'ko']\n\n# Get environment variables\ntranslator_endpoint = os.environ[\"AZURE_TRANSLATOR_ENDPOINT\"]\ntranslator_region = os.environ[\"AZURE_TRANSLATOR_REGION\"]\ntranslator_key = os.environ[\"AZURE_TRANSLATOR_SUBSCRIPTION_KEY\"]\ntranslator_url = translator_endpoint + 'translate'\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Translate a JSON file')\nparser.add_argument('--input-file-name', dest='file_name_in', action='store',\n help='you need to supply file name where your JSON to be translated is located')\nparser.add_argument('--output-file-name', dest='file_name_out', action='store',\n help='you need to supply file name where the translated JSON will be saved')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\n# Check we have all information\nif translator_endpoint and translator_region and translator_key:\n if args.verbose:\n print('DEBUG: environment variables retrieved successfully: {0}, {1}, {2}'.format(translator_endpoint, translator_region, translator_key))\nelse:\n print('ERROR: couldnt retrieve environment variables for translation')\n sys.exit(1)\n\n# Get JSON\ntry:\n with open(args.file_name_in) as f:\n checklist = json.load(f)\nexcept Exception as e:\n print(\"ERROR: Error when processing JSON file\", args.file_name_in, \"-\", str(e))\n sys.exit(1)\n\n# Function to translate a single line of text to a single language\ndef translate_text(text_to_translate, languages):\n if args.verbose:\n print('DEBUG: translating text \"{0}\" on {1}...'.format(text_to_translate, translator_url))\n # If a single languages specified, convert to array\n if not type(languages) == list:\n languages = [languages]\n # Azure Translator parameters\n translator_params = {\n 'api-version': '3.0',\n 'from': 'en',\n 'to': languages\n }\n translator_headers = {\n 'Ocp-Apim-Subscription-Key': translator_key,\n 'Ocp-Apim-Subscription-Region': translator_region,\n 'Content-type': 'application/json',\n 'Accept': 'application/json',\n 'X-ClientTraceId': str(uuid.uuid4())\n }\n translator_body = [{\n 'text': text_to_translate\n }]\n if args.verbose:\n print (\"DEBUG: sending body\", str(translator_body))\n print (\"DEBUG: sending HTTP headers\", str(translator_headers))\n print (\"DEBUG: sending parameters\", str(translator_params))\n try:\n request = requests.post(translator_url, params=translator_params, headers=translator_headers, 
json=translator_body)\n response = request.json()\n if args.verbose:\n print(\"DEBUG: translator response:\")\n print(json.dumps(response, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': ')))\n return str(response[0]['translations'][0]['text'])\n except Exception as e:\n print(\"ERROR: Error in translation:\", str(e))\n\n# Go over all keys and translate them if required\ndef translate_object(checklist_object, language):\n translated_object = checklist_object.copy()\n for (k, v) in translated_object.items():\n if isinstance(v, list):\n translated_items = []\n for list_item in v:\n translated_items.append(translate_object(list_item, language))\n translated_object[k] = translated_items\n else:\n if k in translate_keys:\n # print(\"Found key\", k, \"and scalar value\", v)\n translated_object[k] = translate_text(v, language)\n return translated_object\n\n################\n# Main #\n################\n\nif args.verbose:\n print(\"DEBUG: Starting translations for languages\", str(translate_languages))\n\nfor using_language in translate_languages:\n print(\"INFO: Starting translation to\", using_language)\n translated_checklist = translate_object(checklist, using_language)\n # If no output file was specified, use the input file, and append the language as extension before .json\n if not args.file_name_out:\n file_name_in_base = os.path.basename(args.file_name_in)\n file_name_in_dir = os.path.dirname(args.file_name_in)\n file_name_in_noext = file_name_in_base.split('.')[0]\n file_name_out = file_name_in_noext + '.' + using_language + '.json'\n file_name_out = os.path.join(file_name_in_dir, file_name_out)\n print(\"INFO: saving output file to\", file_name_out)\n translated_checklist_string = json.dumps(translated_checklist, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\n with open(file_name_out, 'w', encoding='utf-8') as f:\n f.write(translated_checklist_string)\n f.close()\n # print(json.dumps(translated_checklist, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': ')))\n\nscripts/timestamp_checklist.py METASEP\n#################################################################################\n#\n# This script updates the timestamp of a specific checklist and saves it.\n# \n# Last updated: January 2023\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nimport requests\nimport datetime\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Timestamp an Azure Review Checklist')\nparser.add_argument('--input-file', dest='input_file', action='store',\n help='You need to supply the name of the JSON file with the checklist to be timestamped')\nparser.add_argument('--output-file', dest='output_file', action='store',\n help='You can optionally supply the name of a new JSON file that will be used to save the output. 
Otherwise the timestamped checklist will overwrite the input file')\nparser.add_argument('--dry-run', dest='dry_run', action='store_true',\n                    default=False,\n                    help='do not save anything, only output to console (default: False)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n                    default=False,\n                    help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\nif not args.input_file:\n    print(\"ERROR: no input file specified, not doing anything\")\n    sys.exit(1)\n\n# Load the checklist\ntry:\n    with open(args.input_file) as f:\n        checklist = json.load(f)\nexcept Exception as e:\n    print(\"ERROR: Error when processing JSON file, nothing changed\", args.input_file, \"-\", str(e))\n    sys.exit(1)\n\n# Overwrite the timestamp\nchecklist['metadata']['timestamp'] = datetime.date.today().strftime(\"%B %d, %Y\")\n\n# If dry-run, show on screen\nif args.dry_run:\n    print(json.dumps(checklist, indent=4))\n\n# Saving output file if specified in the argument\nif not args.dry_run:\n    if args.output_file:\n        output_file = args.output_file\n    else:\n        output_file = args.input_file\n    if args.verbose:\n        print(\"DEBUG: saving output file to\", output_file)\n    checklist_string = json.dumps(checklist, indent=4)\n    with open(output_file, 'w', encoding='utf-8') as f:\n        f.write(checklist_string)\n        f.close()\n\nscripts/sort_checklist.py METASEP\n#################################################################################\n#\n# This script sorts a specific checklist and saves it.\n# \n# Last updated: January 2023\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nimport requests\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Sort the items of an Azure Review Checklist per category and subcategory')\nparser.add_argument('--input-file', dest='input_file', action='store',\n                    help='You need to supply the name of the JSON file with the checklist to be sorted')\nparser.add_argument('--output-file', dest='output_file', action='store',\n                    help='You can optionally supply the name of a new JSON file that will be used to save the output. Otherwise the sorted checklist will overwrite the input file')\nparser.add_argument('--dry-run', dest='dry_run', action='store_true',\n                    default=False,\n                    help='do not save anything, only output to console (default: False)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n                    default=False,\n                    help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\nif not args.input_file:\n    print(\"ERROR: no input file specified, not doing anything\")\n    sys.exit(1)\n\n# Load the checklist\ntry:\n    with open(args.input_file) as f:\n        checklist = json.load(f)\nexcept Exception as e:\n    print(\"ERROR: Error when processing JSON file, nothing changed\", args.input_file, \"-\", str(e))\n    sys.exit(1)\n\n# Sort the items per category and subcategory\nitems = checklist['items']\nitems = sorted(items, key=lambda k: (k['category'],k[\"subcategory\"]))\nchecklist['items'] = items\n\n# If dry-run, show on screen\nif args.dry_run:\n    print(json.dumps(checklist, indent=4))\n\n# Saving output file if specified in the argument\nif not args.dry_run:\n    if args.output_file:\n        output_file = args.output_file\n    else:\n        output_file = args.input_file\n    if args.verbose:\n        print(\"DEBUG: saving output file to\", output_file)\n    checklist_string = json.dumps(checklist, indent=4)\n    with open(output_file, 'w', encoding='utf-8') as f:\n        f.write(checklist_string)\n        f.close()\n\nscripts/compile_checklist.py METASEP\n#################################################################################\n#\n# This script attempts to build a unified checklist out of all the different checklists\n# stored in this repo, and optionally filter it per design area.\n# \n# Last updated: June 2022\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nimport requests\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Compile a unified checklist out of all the checklists in this repo, optionally filtered per design area')\nparser.add_argument('--output-file', dest='output_file', action='store',\n                    help='You can optionally supply the name of the JSON file that will be created. 
Otherwise no output will be generated')\nparser.add_argument('--category', dest='category_filter', action='store',\n                    help='You can optionally provide a category name as a filter')\nparser.add_argument('--checklist-name', dest='new_checklist_name', action='store',\n                    default='Combined checklist',\n                    help='You can optionally provide a name for the combined checklist')\nparser.add_argument('--print-categories', dest='print_categories', action='store_true',\n                    default=False,\n                    help='print the categories of the combined checklist (default: False)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n                    default=False,\n                    help='run in verbose mode (default: False)')\nargs = parser.parse_args()\n\nif args.category_filter:\n    category_filter = args.category_filter.lower()\n\n# Variables\nrepo_contents_url = 'https://api.github.com/repos/azure/review-checklists/contents/checklists'\n\n# Get existing checklists in the repo\ncontent_data = None\nresponse = requests.get(repo_contents_url)\n# If download was successful\nif response.status_code == 200:\n    if args.verbose:\n        print (\"DEBUG: Github contents downloaded successfully from {0}\".format(repo_contents_url))\n    try:\n        content_data = json.loads(response.text)\n    except Exception as e:\n        print(\"Error deserializing JSON content: {0}\".format(str(e)))\n        sys.exit(1)\n\n# Get the list of checklist files\nchecklist_urls = []\nif content_data:\n    for github_object in content_data:\n        if github_object['name'][-7:] == 'en.json':\n            checklist_urls.append(github_object['download_url'])\nelse:\n    print(\"Error: could not retrieve the repository contents from {0}\".format(repo_contents_url))\n    sys.exit(1)\nif args.verbose:\n    print(\"DEBUG: {0} checklists found\".format(str(len(checklist_urls))))\n\n# Load all of the items in memory\nnew_checklist = { \n    'items': [],\n    'status': [\n        {'name': 'Not verified', 'description': 'This check has not been looked at yet'},\n        {'name': 'Open', 'description': 'There is an action item associated to this check'},\n        {'name': 'Fulfilled', 'description': 'This check has been verified, and there are no further action items associated to it'},\n        {'name': 'Not required', 'description': 'Recommendation understood, but not needed by current requirements'},\n        {'name': 'N/A', 'description': 'Not applicable for current design'}\n    ],\n    'severities': [ {'name': 'High'}, {'name': 'Medium'}, {'name': 'Low'} ],\n    'categories': [],\n    'metadata': { 'name': args.new_checklist_name }\n    }\nfor checklist_url in checklist_urls:\n    if args.verbose:\n        print(\"DEBUG: Downloading checklist file from\", checklist_url)\n    response = requests.get(checklist_url)\n    if response.status_code == 200:\n        if args.verbose:\n            print (\"DEBUG: File {0} downloaded successfully\".format(checklist_url))\n        try:\n            # Deserialize JSON to object variable\n            checklist_data = json.loads(response.text)\n            checklist_name = checklist_data['metadata']['name']\n            for item in checklist_data['items']:\n                if checklist_name:\n                    item['checklist'] = checklist_name\n                item_category = str(item['category']).lower()\n                if not args.category_filter or category_filter in item_category:\n                    new_checklist['items'].append(item)\n        except Exception as e:\n            print(\"Error deserializing JSON content: {0}\".format(str(e)))\n            sys.exit(1)\nif args.verbose:\n    print(\"DEBUG: Resulting combined checklist has {0} items\".format(str(len(new_checklist['items']))))\n\n# Add the categories to the new checklist\ncategories = []\nfor item in new_checklist['items']:\n    category_name=item['checklist'] + '/' + item['category']\n    if not 
category_name in categories:\n categories.append(category_name)\nif args.verbose:\n print(\"DEBUG: {0} categories found\".format(str(len(categories))))\nfor category in categories:\n new_checklist['categories'].append({'name': category})\n if args.print_categories:\n print(category)\n\n# Saving output file if specified in the argument\nif args.output_file:\n if args.verbose:\n print(\"DEBUG: saving output file to\", args.output_file)\n new_checklist_string = json.dumps(new_checklist)\n with open(args.output_file, 'w', encoding='utf-8') as f:\n f.write(new_checklist_string)\n f.close()\n\nscripts/checklist_graph_update.py METASEP\n#################################################################################\n#\n# This is a study on two libraries to update Excel files: openpyxl and xlwings\n# This exercise has shown that openpyxl breaks the xlsx files in this repo (maybe\n# because of the macros, or the formulae), while xlwings works fine.\n#\n# This script reads a previously generated JSON file with the results of Azure\n# Resource Graph queries, and stores them in the 'Comments' column of a\n# spreadsheet. Both the JSON file and the spreadsheet file are supplied as\n# parameters.\n# \n# Last updated: March 2022\n#\n#################################################################################\n\nimport json\nimport argparse\nimport sys\nfrom pandas import DataFrame\nfrom openpyxl import load_workbook\nimport xlwings as xw\n\n# Get input arguments\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\nparser.add_argument('--graph-file', dest='graph_file', action='store',\n help='You need to supply a JSON file containing the results of Azure Resource Graph Queries')\nparser.add_argument('--excel-file', dest='excel_file', action='store',\n help='You need to supply an Excel file where the query results will be stored')\nparser.add_argument('--mode', dest='mode', action='store', default=\"openpyxl\",\n help='It can be either xlwings or openpyxl (default is openpyxl)')\nparser.add_argument('--verbose', dest='verbose', action='store_true',\n default=False,\n help='run in verbose mode (default: False)')\nargs = parser.parse_args()\ngraph_file = args.graph_file\nexcel_file = args.excel_file\nmode = args.mode\n\n# Constants\nguid_column_index = \"K\"\ncomment_column_index = \"G\"\nsample_cell_index = 'A4'\n\n# Get JSON\ntry:\n with open(graph_file) as f:\n graph_data = json.load(f)\nexcept Exception as e:\n print(\"ERROR: Error when processing JSON file\", graph_file, \"-\", str(e))\n sys.exit(1)\n\n# Load workbook\ntry:\n if mode == 'openpyxl':\n if args.verbose:\n print(\"DEBUG: working with openpyxl library\")\n wb = load_workbook(filename = excel_file)\n ws = wb['Checklist']\n elif mode == 'xlwings':\n if args.verbose:\n print(\"DEBUG: working with xlwings library\")\n wb = xw.Book(excel_file)\n ws = wb.sheets['Checklist']\n else:\n print(\"ERROR: mode {0} not recognized\".format(mode))\nexcept Exception as e:\n print(\"ERROR: Error when opening Excel file\", excel_file, \"-\", str(e))\n sys.exit(1)\n\n# Print specific cell\nif args.verbose:\n print(\"DEBUG: looking at spreadsheet for\", ws[sample_cell_index].value)\n\n# Get GUID column into a list\nif mode == 'openpyxl':\n guid_col = ws[guid_column_index]\n guid_col_values = [x.value for x in guid_col]\n if args.verbose:\n print(\"DEBUG: GUID column retrieved with\", str(len(guid_col_values)), \"values\")\nelif mode == 'xlwings':\n guid_col_values = 
ws.range(guid_column_index + \":\" + guid_column_index).value\n    if args.verbose:\n        print(\"DEBUG: GUID column retrieved with\", str(len(guid_col_values)), \"values\")\nelse:\n    print(\"ERROR: mode {0} not recognized\".format(mode))\n    sys.exit(1)\n\n# Go over all checks in the JSON file\nfor check in graph_data['checks']:\n    guid = check['guid']\n    arm_id = check['id']\n    compliant = check['compliant']\n    if (compliant == \"false\"):\n        comment = \"Non-compliant: {0}\\n\".format(arm_id)\n    elif (compliant == \"true\"):\n        comment = \"Compliant: {0}\\n\".format(arm_id)\n    else:\n        print(\"ERROR: compliant status {0} not recognized\".format(compliant))\n    # Find the guid in the list\n    if guid in guid_col_values:\n        row = guid_col_values.index(guid)\n        cell_index = comment_column_index + str(row)\n        print(\"DEBUG: updating cell\", cell_index)\n        if mode == 'openpyxl':\n            ws[cell_index] = comment\n        elif mode == 'xlwings':\n            ws.range(cell_index).value = comment\n    else:\n        print(\"ERROR: could not find GUID {0} in the Excel list\".format(guid))\n\n# Saving file\nif mode == 'openpyxl':\n    print(\"DEBUG: saving workbook\", excel_file)\n    try:\n        wb.save(excel_file)\n    except Exception as e:\n        print(\"ERROR: Error when saving Excel file\", excel_file, \"-\", str(e))\n        sys.exit(1)\nelif mode == 'xlwings':\n    print(\"DEBUG: saving workbook\", excel_file)\n    try:\n        wb.save()\n    except Exception as e:\n        print(\"ERROR: Error when saving Excel file\", excel_file, \"-\", str(e))\n        sys.exit(1)\nelse:\n    print(\"ERROR: mode {0} not recognized\".format(mode))\n\nscripts/create_master_checklist.py METASEP\n
store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = 
input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n 
ws[col_guid + str(row_counter)].value = guid\r\n        # Next row\r\n        row_counter += 1\r\n\r\n    # Display summary\r\n    if args.verbose:\r\n        number_of_checks = row_counter - row1\r\n        print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n    # Get worksheet\r\n    try:\r\n        wsv = wb[worksheet_values_name]\r\n        if args.verbose:\r\n            print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n    except Exception as e:\r\n        print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n        sys.exit(1)\r\n\r\n    # Update status\r\n    row_counter = values_row1\r\n    for item in checklist_data.get(\"status\"):\r\n        status = item.get(\"name\")\r\n        description = item.get(\"description\")\r\n        wsv[col_values_status + str(row_counter)].value = status\r\n        wsv[col_values_description + str(row_counter)].value = description\r\n        row_counter += 1\r\n\r\n    # Display summary\r\n    if args.verbose:\r\n        print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\r\n\r\n    # Update severities\r\n    row_counter = values_row1\r\n    for item in checklist_data.get(\"severities\"):\r\n        severity = item.get(\"name\")\r\n        wsv[col_values_severity + str(row_counter)].value = severity\r\n        row_counter += 1\r\n\r\n    # Display summary\r\n    if args.verbose:\r\n        print(\"DEBUG:\", str(row_counter - values_row1), \"severities addedd to Excel spreadsheet\")\r\n\r\n    # Data validation\r\n    # dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)\r\n    dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True)\r\n    rangevar = col_status + str(row1) +':' + col_status + str(row1 + number_of_checks)\r\n    if args.verbose:\r\n        print(\"DEBUG: adding data validation to range\", rangevar)\r\n    dv.add(rangevar)\r\n    ws.add_data_validation(dv)\r\n\r\n    # Close book\r\n    if args.verbose:\r\n        print(\"DEBUG: saving workbook\", output_excel_file)\r\n    try:\r\n        wb.save(output_excel_file)\r\n    except Exception as e:\r\n        print(\"ERROR: Error when saving Excel file to\", output_excel_file, \"-\", str(e))\r\n        sys.exit(1)\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# Download checklist\r\nif args.input_folder:\r\n    # Get consolidated checklist\r\n    checklist_master_data = get_consolidated_checklist(args.input_folder, args.language)\r\n    # Set output file variables\r\n    xlsx_output_file = os.path.join(args.xlsx_output_folder, args.output_name + \".xlsx\")\r\n    json_output_file = os.path.join(args.json_output_folder, args.output_name + \".json\")\r\n    # Dump master checklist to JSON file\r\n    dump_json_file(checklist_master_data, json_output_file)\r\n    # Update spreadsheet\r
the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n 
row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n ws[col_guid + str(row_counter)].value = guid\r\n # Next row\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n number_of_checks = row_counter - row1\r\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n # Get worksheet\r\n try:\r\n wsv = wb[worksheet_values_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Update status\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"status\"):\r\n status = item.get(\"name\")\r\n description = item.get(\"description\")\r\n wsv[col_values_status + str(row_counter)].value = status\r\n wsv[col_values_description + str(row_counter)].value = description\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\r\n\r\n # Update severities\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"severities\"):\r\n severity = item.get(\"name\")\r\n wsv[col_values_severity + str(row_counter)].value = severity\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n print(\"DEBUG:\", str(row_counter - values_row1), \"severities addedd to Excel spreadsheet\")\r\n\r\n # Data validation\r\n # dv = DataValidation(type=\"list\", 
formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)\r\n dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True)\r\n rangevar = col_status + str(row1) +':' + col_status + str(row1 + number_of_checks)\r\n if args.verbose:\r\n print(\"DEBUG: adding data validation to range\", rangevar)\r\n dv.add(rangevar)\r\n ws.add_data_validation(dv)\r\n\r\n # Close book\r\n if args.verbose:\r\n print(\"DEBUG: saving workbook\", output_excel_file)\r\n try:\r\n wb.save(output_excel_file)\r\n except Exception as e:\r\n print(\"ERROR: Error when saving Excel file to\", output_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n########\r\n# Main #\r\n########\r\n\r\n# Download checklist\r\nif args.input_folder:\r\n # Get consolidated checklist\r","type":"infile"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n 
print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist 
'{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n ws[col_guid + str(row_counter)].value = guid\r\n # Next row\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n number_of_checks = row_counter - row1\r\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n # Get worksheet\r\n try:\r\n wsv = wb[worksheet_values_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Update status\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"status\"):\r\n status = item.get(\"name\")\r\n description = item.get(\"description\")\r\n wsv[col_values_status + str(row_counter)].value 
= status\r\n wsv[col_values_description + str(row_counter)].value = description\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\r\n\r\n # Update severities\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"severities\"):\r\n severity = item.get(\"name\")\r\n wsv[col_values_severity + str(row_counter)].value = severity\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n print(\"DEBUG:\", str(row_counter - values_row1), \"severities addedd to Excel spreadsheet\")\r\n\r\n # Data validation\r\n # dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)\r\n dv = DataValidation(type=\"list\", formula1='=Values!$B$2:$B$6', allow_blank=True)\r\n rangevar = col_status + str(row1) +':' + col_status + str(row1 + number_of_checks)\r\n if args.verbose:\r\n print(\"DEBUG: adding data validation to range\", rangevar)\r\n dv.add(rangevar)\r\n ws.add_data_validation(dv)\r\n\r\n # Close book\r\n if args.verbose:\r\n print(\"DEBUG: saving workbook\", output_excel_file)\r\n try:\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists 
into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as 
e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = 
glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n 
sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the 
name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to 
spreadsheet\r\n row_counter = row1\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the 
master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from 
JSON\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n 
######################################################################
#
# This script combines all of the existing checklists into one big
# checklist, and saves it in JSON and XLSX (macro-free) formats.
#
# Example usage:
# python3 ./scripts/create_master_checklist.py \
#     --input-folder="./checklists" \
#     --language="en" \
#     --excel-file="./spreadsheet/macrofree/review_checklist_master_empty.xlsx" \
#     --output-name="checklist.en.master" \
#     --json-output-folder="./checklists/" \
#     --xlsx-output-folder="./spreadsheet/macrofree/"
#
# Last updated: March 2022
#
######################################################################

import json
import argparse
import sys
import os
import requests
import glob
import datetime
from openpyxl import load_workbook
from openpyxl.worksheet.datavalidation import DataValidation

# Get input arguments
parser = argparse.ArgumentParser(description='Combine all checklists into one master checklist in JSON and XLSX formats')
parser.add_argument('--input-folder', dest='input_folder', action='store',
                    help='Input folder where the checklists to merge are stored')
parser.add_argument('--language', dest='language', action='store', default='en',
                    help='Language of the checklist files to merge; files in other languages are ignored (default: en)')
parser.add_argument('--excel-file', dest='excel_file', action='store',
                    help='Excel file that will be used as a template to create the XLSX file with the checklist')
parser.add_argument('--json-output-folder', dest='json_output_folder', action='store',
                    help='Folder where to store the JSON output')
parser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',
                    help='Folder where to store the macro-free Excel output')
parser.add_argument('--output-name', dest='output_name', action='store',
                    help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')
parser.add_argument('--verbose', dest='verbose', action='store_true',
                    default=False,
                    help='run in verbose mode (default: False)')
args = parser.parse_args()
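# Illustrative note (not part of the original script): each input checklist JSON
# file is expected to expose the fields that the code below reads, i.e. a
# "metadata" section with a "name", an "items" list, plus "severities" and
# "status" sections. A minimal sketch with made-up values could look like:
#
#   {
#       "metadata": { "name": "Example checklist" },
#       "items": [
#           { "guid": "...", "category": "...", "subcategory": "...",
#             "text": "...", "description": "...", "severity": "...",
#             "link": "...", "training": "...", "graph_success": "..." }
#       ],
#       "severities": [ { "name": "High" } ],
#       "status": [ { "name": "Not verified" } ]
#   }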
# Consolidate all checklists into one big checklist object
def get_consolidated_checklist(input_folder, language):
    # Initialize checklist object
    checklist_master_data = {
        'items': [],
        'metadata': {
            'name': 'Master checklist',
            'timestamp': datetime.date.today().strftime("%B %d, %Y")
        }
    }
    # Find all files in the input folder matching the pattern "*.<language>.json"
    if args.verbose:
        print("DEBUG: looking for JSON files in folder", input_folder, "with pattern *.", language + ".json...")
    checklist_files = glob.glob(input_folder + "/*." + language + ".json")
    if args.verbose:
        print("DEBUG: found", len(checklist_files), "JSON files")
    for checklist_file in checklist_files:
        # Get JSON
        try:
            with open(checklist_file) as f:
                checklist_data = json.load(f)
                if args.verbose:
                    print("DEBUG: JSON file", checklist_file, "loaded successfully with {0} items".format(len(checklist_data["items"])))
                for item in checklist_data["items"]:
                    # Add a field with the name of the checklist
                    item["checklist"] = checklist_data["metadata"]["name"]
                # Add the items to the master checklist
                checklist_master_data['items'] += checklist_data['items']
                # Replace the master checklist severities and status sections (for a given language they should all be the same)
                checklist_master_data['severities'] = checklist_data['severities']
                checklist_master_data['status'] = checklist_data['status']
        except Exception as e:
            print("ERROR: Error when processing JSON file", checklist_file, "-", str(e))
    if args.verbose:
        print("DEBUG: master checklist contains", len(checklist_master_data["items"]), "items")
    return checklist_master_data

# Dump JSON object to file
def dump_json_file(json_object, filename):
    if args.verbose:
        print("DEBUG: dumping JSON object to file", filename)
    json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(json_string)
        f.close()
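# Illustrative sketch (not the original code): the two helpers above are meant to
# be combined along the lines of the example usage in the header, for instance:
#
#   master = get_consolidated_checklist(args.input_folder, args.language)
#   dump_json_file(master, os.path.join(args.json_output_folder, args.output_name + '.json'))
#
# The folder and file-name pieces come straight from the argparse options above.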
# Main function
def update_excel_file(input_excel_file, output_excel_file, checklist_data):
    # Constants
    worksheet_checklist_name = 'Checklist'
    row1 = 8  # First row after which the Excel spreadsheet will be updated
    col_checklist_name = "A"
    row_checklist_name = "4"
    guid_column_index = "L"
    comment_column_index = "G"
    sample_cell_index = 'A4'
    col_checklist = "A"
    col_area = "B"
    col_subarea = "C"
    col_check = "D"
    col_desc = "E"
    col_sev = "F"
    col_status = "G"
    col_comment = "H"
    col_link = "I"
    col_training = "J"
    col_arg_success = "K"
    col_arg_failure = "L"
    col_guid = "M"
    info_link_text = 'More info'
    training_link_text = 'Training'
    worksheet_values_name = 'Values'
    values_row1 = 2
    col_values_severity = "A"
    col_values_status = "B"
    col_values_area = "C"
    col_values_description = "H"

    # Load workbook
    try:
        wb = load_workbook(filename=input_excel_file)
        if args.verbose:
            print("DEBUG: workbook", input_excel_file, "opened successfully")
    except Exception as e:
        print("ERROR: Error when opening Excel file", input_excel_file, "-", str(e))
        sys.exit(1)

    # Get worksheet
    try:
        ws = wb[worksheet_checklist_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_checklist_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)
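    # Assumption/illustration: DataValidation (imported at the top) is not used in
    # this part of the function; with openpyxl it is typically used to attach a
    # dropdown list to a column such as the status column, e.g.
    #
    #   dv = DataValidation(type="list", formula1='"Not verified,Verified"', allow_blank=True)
    #   ws.add_data_validation(dv)
    #   dv.add('{0}{1}:{0}{2}'.format(col_status, row1, row1 + 1000))
    #
    # The concrete list values and cell range here are placeholders for this sketch.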
    # Set checklist name
    try:
        ws[col_checklist_name + row_checklist_name] = checklist_data["metadata"]["name"]
        if args.verbose:
            print("DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'".format(checklist_data["metadata"]["name"]))
    except Exception as e:
        print("ERROR: Error when setting the checklist name in worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Get default status from the JSON, default to "Not verified"
    try:
        status_list = checklist_data.get("status")
        default_status = status_list[0].get("name")
        if args.verbose:
            print("DEBUG: default status retrieved from checklist: '{0}'".format(default_status))
    except:
        default_status = "Not verified"
        if args.verbose:
            print("DEBUG: using default status 'Not verified'")

    # For each checklist item, add a row to the spreadsheet
    row_counter = row1
    for item in checklist_data.get("items"):
        # Read variables from JSON
        checklist_name = item.get("checklist")
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        status = default_status
        graph_query_success = item.get("graph_success")
to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel 
output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if 
args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n ws[col_guid + 
str(row_counter)].value = guid\r\n # Next row\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n number_of_checks = row_counter - row1\r\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n # Get worksheet\r\n try:\r\n wsv = wb[worksheet_values_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Update status\r\n row_counter = values_row1\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + 
language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status 
from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n ws[col_guid + str(row_counter)].value = guid\r\n # Next row\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n number_of_checks = row_counter - row1\r\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n # Get worksheet\r\n try:\r\n wsv = wb[worksheet_values_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Update status\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"status\"):\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# 
--input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n 
print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n 
graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n ws[col_guid + str(row_counter)].value = guid\r\n # Next row\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n number_of_checks = row_counter - row1\r\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n # Get worksheet\r\n try:\r\n wsv = wb[worksheet_values_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Update status\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"status\"):\r\n status = item.get(\"name\")\r\n description = item.get(\"description\")\r\n wsv[col_values_status + str(row_counter)].value = status\r\n wsv[col_values_description + str(row_counter)].value = description\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\r\n\r\n # Update severities\r\n row_counter = values_row1\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport 
datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with 
open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r\n ws[col_checklist_name + row_checklist_name] = checklist_data[\"metadata\"][\"name\"]\r\n if args.verbose:\r\n print(\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\".format(checklist_data[\"metadata\"][\"name\"]))\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get default status from the JSON, default to \"Not verified\"\r\n try:\r\n status_list = checklist_data.get(\"status\")\r\n default_status = status_list[0].get(\"name\")\r\n if args.verbose:\r\n print (\"DEBUG: default status retrieved from checklist: '{0}'\".format(default_status))\r\n except:\r\n default_status = \"Not verified\"\r\n if args.verbose:\r\n print (\"DEBUG: Using default status 'Not verified'\")\r\n pass\r\n\r\n # For each checklist item, add a row to spreadsheet\r\n row_counter = row1\r\n for item in checklist_data.get(\"items\"):\r\n # Read variables from JSON\r\n checklist_name = item.get(\"checklist\")\r\n guid = item.get(\"guid\")\r\n category = item.get(\"category\")\r\n subcategory = item.get(\"subcategory\")\r\n text = item.get(\"text\")\r\n description = item.get(\"description\")\r\n severity = item.get(\"severity\")\r\n link = item.get(\"link\")\r\n training = item.get(\"training\")\r\n status = default_status\r\n graph_query_success = item.get(\"graph_success\")\r\n graph_query_failure = item.get(\"graph_failure\")\r\n # Update Excel\r\n ws[col_checklist + str(row_counter)].value = checklist_name\r\n ws[col_area + str(row_counter)].value = category\r\n ws[col_subarea + str(row_counter)].value = subcategory\r\n ws[col_check + str(row_counter)].value = text\r\n ws[col_desc + str(row_counter)].value = description\r\n ws[col_sev + str(row_counter)].value = severity\r\n ws[col_status + str(row_counter)].value = status\r\n 
ws[col_link + str(row_counter)].value = link\r\n # if link != None:\r\n # link_elements = link.split('#')\r\n # link_address = link_elements[0]\r\n # if len(link_elements) > 1:\r\n # link_subaddress = link_elements[1]\r\n # else:\r\n # link_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\"\", TextToDisplay=info_link_text)\r\n ws[col_training + str(row_counter)].value = training\r\n # if training != None:\r\n # training_elements = training.split('#')\r\n # training_address = training_elements[0]\r\n # if len(training_elements) > 1:\r\n # training_subaddress = training_elements[1]\r\n # else:\r\n # training_subaddress = \"\"\r\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\"\", TextToDisplay=training_link_text)\r\n # GUID and ARG queries\r\n ws[col_arg_success + str(row_counter)].value = graph_query_success\r\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\r\n ws[col_guid + str(row_counter)].value = guid\r\n # Next row\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n number_of_checks = row_counter - row1\r\n print(\"DEBUG:\", str(number_of_checks), \"checks addedd to Excel spreadsheet\")\r\n\r\n # Get worksheet\r\n try:\r\n wsv = wb[worksheet_values_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_values_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_values_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Update status\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"status\"):\r\n status = item.get(\"name\")\r\n description = item.get(\"description\")\r\n wsv[col_values_status + str(row_counter)].value = status\r\n wsv[col_values_description + str(row_counter)].value = description\r\n row_counter += 1\r\n\r\n # Display summary\r\n if args.verbose:\r\n print(\"DEBUG:\", str(row_counter - values_row1), \"statuses addedd to Excel spreadsheet\")\r\n\r\n # Update severities\r\n row_counter = values_row1\r\n for item in checklist_data.get(\"severities\"):\r","type":"common"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', 
action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 
'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r","type":"non_informative"},{"content":"######################################################################\r\n#\r\n# This script combines all of the existing checklists into one big\r\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\r\n#\r\n# Example usage:\r\n# python3 ./scripts/create_master_checklist.py \\\r\n# --input-folder=\"./checklists\" \\\r\n# --language=\"en\" \\\r\n# --excel-file=\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\" \\\r\n# --output-name=\"checklist.en.master\" \\\r\n# --json-output-folder=\"./checklists/\" \\\r\n# --xlsx-output-folder=\"./spreadsheet/macrofree/\"\r\n# \r\n# Last updated: March 2022\r\n#\r\n######################################################################\r\n\r\nimport json\r\nimport argparse\r\nimport sys\r\nimport os\r\nimport requests\r\nimport glob\r\nimport datetime\r\nfrom openpyxl import load_workbook\r\nfrom openpyxl.worksheet.datavalidation import DataValidation\r\n\r\n# Get input arguments\r\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\r\nparser.add_argument('--input-folder', dest='input_folder', action='store',\r\n help='Input folder where the checklists to merge are stored')\r\nparser.add_argument('--language', dest='language', action='store', default='en',\r\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\r\nparser.add_argument('--excel-file', dest='excel_file', action='store',\r\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\r\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\r\n help='Folder where to store the JSON output')\r\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\r\n help='Folder where to store the macro free Excel output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n 
    }
    # Find all files in the input folder matching the pattern "*.<language>.json"
    if args.verbose:
        print("DEBUG: looking for JSON files in folder", input_folder, "with pattern *." + language + ".json...")
    checklist_files = glob.glob(input_folder + "/*." + language + ".json")
    if args.verbose:
        print("DEBUG: found", len(checklist_files), "JSON files")
    for checklist_file in checklist_files:
        # Get JSON
        try:
            with open(checklist_file) as f:
                checklist_data = json.load(f)
            if args.verbose:
                print("DEBUG: JSON file", checklist_file, "loaded successfully with {0} items".format(len(checklist_data["items"])))
            for item in checklist_data["items"]:
                # Add field with the name of the checklist
                item["checklist"] = checklist_data["metadata"]["name"]
            # Add items to the master checklist
            checklist_master_data['items'] += checklist_data['items']
            # Replace the master checklist severities and status sections (for a given language they should be all the same)
            checklist_master_data['severities'] = checklist_data['severities']
            checklist_master_data['status'] = checklist_data['status']
        except Exception as e:
            print("ERROR: Error when processing JSON file", checklist_file, "-", str(e))
    if args.verbose:
        print("DEBUG: master checklist contains", len(checklist_master_data["items"]), "items")
    return checklist_master_data

# Dump JSON object to file
def dump_json_file(json_object, filename):
    if args.verbose:
        print("DEBUG: dumping JSON object to file", filename)
    json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))
    # The context manager closes the file, no explicit close() needed
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(json_string)

# Fill the Excel template with the contents of the consolidated checklist
def update_excel_file(input_excel_file, output_excel_file, checklist_data):
    # Constants
    worksheet_checklist_name = 'Checklist'
    row1 = 8  # First row after which the Excel spreadsheet will be updated
    col_checklist_name = "A"
    row_checklist_name = "4"
    guid_column_index = "L"
    comment_column_index = "G"
    sample_cell_index = 'A4'
    col_checklist = "A"
    col_area = "B"
    col_subarea = "C"
    col_check = "D"
    col_desc = "E"
    col_sev = "F"
    col_status = "G"
    col_comment = "H"
    col_link = "I"
    col_training = "J"
    col_arg_success = "K"
    col_arg_failure = "L"
    col_guid = "M"
    info_link_text = 'More info'
    training_link_text = 'Training'
    worksheet_values_name = 'Values'
    values_row1 = 2
    col_values_severity = "A"
    col_values_status = "B"
    col_values_area = "C"
    col_values_description = "H"

    # Load workbook
    try:
        wb = load_workbook(filename=input_excel_file)
        if args.verbose:
            print("DEBUG: workbook", input_excel_file, "opened successfully")
    except Exception as e:
        print("ERROR: Error when opening Excel file", input_excel_file, "-", str(e))
        sys.exit(1)

    # Get worksheet
    try:
        ws = wb[worksheet_checklist_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_checklist_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Set checklist name
    try:
        ws[col_checklist_name + row_checklist_name] = checklist_data["metadata"]["name"]
        if args.verbose:
            print("DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'".format(checklist_data["metadata"]["name"]))
    except Exception as e:
        print("ERROR: Error when setting checklist name in worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Get default status from the JSON, default to "Not verified"
    try:
        status_list = checklist_data.get("status")
        default_status = status_list[0].get("name")
        if args.verbose:
            print("DEBUG: default status retrieved from checklist: '{0}'".format(default_status))
    except Exception:
        default_status = "Not verified"
        if args.verbose:
            print("DEBUG: Using default status 'Not verified'")

    # For each checklist item, add a row to spreadsheet
    row_counter = row1
    for item in checklist_data.get("items"):
        # Read variables from JSON
        checklist_name = item.get("checklist")
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        status = default_status
        graph_query_success = item.get("graph_success")
        graph_query_failure = item.get("graph_failure")
        # Update Excel
        ws[col_checklist + str(row_counter)].value = checklist_name
        ws[col_area + str(row_counter)].value = category
        ws[col_subarea + str(row_counter)].value = subcategory
        ws[col_check + str(row_counter)].value = text
        ws[col_desc + str(row_counter)].value = description
        ws[col_sev + str(row_counter)].value = severity
        ws[col_status + str(row_counter)].value = status
        ws[col_link + str(row_counter)].value = link
        # if link != None:
        #     link_elements = link.split('#')
        #     link_address = link_elements[0]
        #     if len(link_elements) > 1:
        #         link_subaddress = link_elements[1]
        #     else:
        #         link_subaddress = ""
        #     ws.api.Hyperlinks.Add(Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip="", TextToDisplay=info_link_text)
        ws[col_training + str(row_counter)].value = training
        # if training != None:
        #     training_elements = training.split('#')
        #     training_address = training_elements[0]
        #     if len(training_elements) > 1:
        #         training_subaddress = training_elements[1]
        #     else:
        #         training_subaddress = ""
        #     ws.api.Hyperlinks.Add(Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip="", TextToDisplay=training_link_text)
        # GUID and ARG queries
        ws[col_arg_success + str(row_counter)].value = graph_query_success
        ws[col_arg_failure + str(row_counter)].value = graph_query_failure
        ws[col_guid + str(row_counter)].value = guid
        # Next row
        row_counter += 1

    # Display summary
    # number_of_checks is also needed later for the data validation range, so it is
    # computed unconditionally (in the original it was only set in verbose mode)
    number_of_checks = row_counter - row1
    if args.verbose:
        print("DEBUG:", str(number_of_checks), "checks added to Excel spreadsheet")

    # Get worksheet
    try:
        wsv = wb[worksheet_values_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_values_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_values_name, "-", str(e))
        sys.exit(1)

    # Update status
    row_counter = values_row1
    for item in checklist_data.get("status"):
        status = item.get("name")
        description = item.get("description")
        wsv[col_values_status + str(row_counter)].value = status
        wsv[col_values_description + str(row_counter)].value = description
        row_counter += 1

    # Display summary
    if args.verbose:
        print("DEBUG:", str(row_counter - values_row1), "statuses added to Excel spreadsheet")

    # Update severities
    row_counter = values_row1
    for item in checklist_data.get("severities"):
        severity = item.get("name")
        wsv[col_values_severity + str(row_counter)].value = severity
        row_counter += 1

    # Display summary
    if args.verbose:
        print("DEBUG:", str(row_counter - values_row1), "severities added to Excel spreadsheet")

    # Data validation
    # dv = DataValidation(type="list", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)
    dv = DataValidation(type="list", formula1='=Values!$B$2:$B$6', allow_blank=True)
    rangevar = col_status + str(row1) + ':' + col_status + str(row1 + number_of_checks)
    if args.verbose:
        print("DEBUG: adding data validation to range", rangevar)
    dv.add(rangevar)
    ws.add_data_validation(dv)

    # Save workbook
    if args.verbose:
        print("DEBUG: saving workbook", output_excel_file)
    try:
        wb.save(output_excel_file)
    except Exception as e:
        print("ERROR: Error when saving Excel file to", output_excel_file, "-", str(e))
        sys.exit(1)

########
# Main #
########

# Consolidate the checklists and generate the output files
if args.input_folder:
    # Get consolidated checklist
    checklist_master_data = get_consolidated_checklist(args.input_folder, args.language)
    # Set output file variables
    xlsx_output_file = os.path.join(args.xlsx_output_folder, args.output_name + ".xlsx")
    json_output_file = os.path.join(args.json_output_folder, args.output_name + ".json")
    # Dump master checklist to JSON file
    dump_json_file(checklist_master_data, json_output_file)
    # Update spreadsheet
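For a quick local test of get_consolidated_checklist, the snippet below writes a minimal input file containing only the fields the script actually reads (metadata.name, status, severities, and the per-item keys consumed by update_excel_file). The file name and all field values are illustrative assumptions, not taken from any real checklist.

import json

# Minimal sketch of an input checklist file; field names match what
# create_master_checklist.py reads, the concrete values are made up.
sample_checklist = {
    "metadata": {"name": "Sample checklist"},
    "status": [
        {"name": "Not verified", "description": "The check has not been looked at yet"},
    ],
    "severities": [
        {"name": "High"},
        {"name": "Medium"},
        {"name": "Low"},
    ],
    "items": [
        {
            "guid": "00000000-0000-0000-0000-000000000001",
            "category": "Security",
            "subcategory": "Identity",
            "text": "Sample check text",
            "description": "Longer description of the sample check",
            "severity": "High",
            "link": "https://learn.microsoft.com/azure",
            "training": "https://learn.microsoft.com/training",
            "graph_success": "Resources | where type == 'sample'",
            "graph_failure": "Resources | where type != 'sample'",
        }
    ],
}

# The ".en.json" suffix matches the glob pattern used by get_consolidated_checklist
# when --language=en (the default).
with open("sample.en.json", "w", encoding="utf-8") as f:
    json.dump(sample_checklist, f, indent=4)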
output')\r\nparser.add_argument('--output-name', dest='output_name', action='store',\r\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\r\nparser.add_argument('--verbose', dest='verbose', action='store_true',\r\n default=False,\r\n help='run in verbose mode (default: False)')\r\nargs = parser.parse_args()\r\n\r\n# Consolidate all checklists into one big checklist object\r\ndef get_consolidated_checklist(input_folder, language):\r\n # Initialize checklist object\r\n checklist_master_data = {\r\n 'items': [],\r\n 'metadata': {\r\n 'name': 'Master checklist',\r\n 'timestamp': datetime.date.today().strftime(\"%B %d, %Y\")\r\n }\r\n }\r\n # Find all files in the input folder matching the pattern \"language*.json\"\r\n if args.verbose:\r\n print(\"DEBUG: looking for JSON files in folder\", input_folder, \"with pattern *.\", language + \".json...\")\r\n checklist_files = glob.glob(input_folder + \"/*.\" + language + \".json\")\r\n if args.verbose:\r\n print(\"DEBUG: found\", len(checklist_files), \"JSON files\")\r\n for checklist_file in checklist_files:\r\n # Get JSON\r\n try:\r\n with open(checklist_file) as f:\r\n checklist_data = json.load(f)\r\n if args.verbose:\r\n print(\"DEBUG: JSON file\", checklist_file, \"loaded successfully with {0} items\".format(len(checklist_data[\"items\"])))\r\n for item in checklist_data[\"items\"]:\r\n # Add field with the name of the checklist\r\n item[\"checklist\"] = checklist_data[\"metadata\"][\"name\"]\r\n # Add items to the master checklist\r\n checklist_master_data['items'] += checklist_data['items']\r\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\r\n checklist_master_data['severities'] = checklist_data['severities']\r\n checklist_master_data['status'] = checklist_data['status']\r\n except Exception as e:\r\n print(\"ERROR: Error when processing JSON file\", checklist_file, \"-\", str(e))\r\n if args.verbose:\r\n print(\"DEBUG: master checklist contains\", len(checklist_master_data[\"items\"]), \"items\")\r\n return checklist_master_data\r\n\r\n# Dump JSON object to file\r\ndef dump_json_file(json_object, filename):\r\n if args.verbose:\r\n print(\"DEBUG: dumping JSON object to file\", filename)\r\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\r\n with open(filename, 'w', encoding='utf-8') as f:\r\n f.write(json_string)\r\n f.close()\r\n\r\n# Main function\r\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\r\n # Constants\r\n worksheet_checklist_name = 'Checklist'\r\n row1 = 8 # First row after which the Excel spreadsheet will be updated\r\n col_checklist_name = \"A\"\r\n row_checklist_name = \"4\"\r\n guid_column_index = \"L\"\r\n comment_column_index = \"G\"\r\n sample_cell_index = 'A4'\r\n col_checklist=\"A\"\r\n col_area = \"B\"\r\n col_subarea = \"C\"\r\n col_check = \"D\"\r\n col_desc = \"E\"\r\n col_sev = \"F\"\r\n col_status = \"G\"\r\n col_comment = \"H\"\r\n col_link = \"I\"\r\n col_training = \"J\"\r\n col_arg_success = \"K\"\r\n col_arg_failure = \"L\"\r\n col_guid = \"M\"\r\n info_link_text = 'More info'\r\n training_link_text = 'Training'\r\n worksheet_values_name = 'Values'\r\n values_row1 = 2\r\n col_values_severity = \"A\"\r\n col_values_status = \"B\"\r\n col_values_area = \"C\"\r\n col_values_description = \"H\"\r\n\r\n # Load workbook\r\n try:\r\n wb = load_workbook(filename = input_excel_file)\r\n if 
args.verbose:\r\n print(\"DEBUG: workbook\", input_excel_file, \"opened successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when opening Excel file\", input_excel_file, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Get worksheet\r\n try:\r\n ws = wb[worksheet_checklist_name]\r\n if args.verbose:\r\n print(\"DEBUG: worksheet\", worksheet_checklist_name, \"selected successfully\")\r\n except Exception as e:\r\n print(\"ERROR: Error when selecting worksheet\", worksheet_checklist_name, \"-\", str(e))\r\n sys.exit(1)\r\n\r\n # Set checklist name\r\n try:\r","type":"random"}],"string":"[\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: 
looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", 
worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n status = default_status\\r\\n graph_query_success = item.get(\\\"graph_success\\\")\\r\\n graph_query_failure = item.get(\\\"graph_failure\\\")\\r\\n # Update Excel\\r\\n ws[col_checklist + str(row_counter)].value = checklist_name\\r\\n ws[col_area + str(row_counter)].value = category\\r\\n ws[col_subarea + str(row_counter)].value = subcategory\\r\\n ws[col_check + str(row_counter)].value = text\\r\\n ws[col_desc + str(row_counter)].value = description\\r\\n ws[col_sev + str(row_counter)].value = severity\\r\\n ws[col_status + str(row_counter)].value = status\\r\\n ws[col_link + str(row_counter)].value = link\\r\\n # if link != None:\\r\\n # link_elements = link.split('#')\\r\\n # link_address = link_elements[0]\\r\\n # if len(link_elements) > 1:\\r\\n # link_subaddress = link_elements[1]\\r\\n # else:\\r\\n # link_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\\\"\\\", TextToDisplay=info_link_text)\\r\\n ws[col_training + str(row_counter)].value = training\\r\\n # if training != None:\\r\\n # training_elements = training.split('#')\\r\\n # training_address = training_elements[0]\\r\\n # if len(training_elements) > 1:\\r\\n # training_subaddress = training_elements[1]\\r\\n # else:\\r\\n # training_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\\\"\\\", TextToDisplay=training_link_text)\\r\\n # GUID and ARG queries\\r\\n ws[col_arg_success + str(row_counter)].value = graph_query_success\\r\\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\\r\\n ws[col_guid + str(row_counter)].value = guid\\r\\n # Next row\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n number_of_checks = row_counter - row1\\r\\n print(\\\"DEBUG:\\\", str(number_of_checks), 
\\\"checks addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n wsv = wb[worksheet_values_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_values_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_values_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Update status\\r\\n row_counter = values_row1\\r\\n for item in checklist_data.get(\\\"status\\\"):\\r\\n status = item.get(\\\"name\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n wsv[col_values_status + str(row_counter)].value = status\\r\\n wsv[col_values_description + str(row_counter)].value = description\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG:\\\", str(row_counter - values_row1), \\\"statuses addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Update severities\\r\\n row_counter = values_row1\\r\\n for item in checklist_data.get(\\\"severities\\\"):\\r\\n severity = item.get(\\\"name\\\")\\r\\n wsv[col_values_severity + str(row_counter)].value = severity\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG:\\\", str(row_counter - values_row1), \\\"severities addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Data validation\\r\\n # dv = DataValidation(type=\\\"list\\\", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)\\r\\n dv = DataValidation(type=\\\"list\\\", formula1='=Values!$B$2:$B$6', allow_blank=True)\\r\\n rangevar = col_status + str(row1) +':' + col_status + str(row1 + number_of_checks)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: adding data validation to range\\\", rangevar)\\r\\n dv.add(rangevar)\\r\\n ws.add_data_validation(dv)\\r\\n\\r\\n # Close book\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: saving workbook\\\", output_excel_file)\\r\\n try:\\r\\n wb.save(output_excel_file)\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when saving Excel file to\\\", output_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n########\\r\\n# Main #\\r\\n########\\r\\n\\r\\n# Download checklist\\r\\nif args.input_folder:\\r\\n # Get consolidated checklist\\r\\n checklist_master_data = get_consolidated_checklist(args.input_folder, args.language)\\r\\n # Set output file variables\\r\\n xlsx_output_file = os.path.join(args.xlsx_output_folder, args.output_name + \\\".xlsx\\\")\\r\\n json_output_file = os.path.join(args.json_output_folder, args.output_name + \\\".json\\\")\\r\\n # Dump master checklist to JSON file\\r\",\n \"type\": \"infile\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport 
datetime
from openpyxl import load_workbook
from openpyxl.worksheet.datavalidation import DataValidation

# Get input arguments
parser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')
parser.add_argument('--input-folder', dest='input_folder', action='store',
                    help='Input folder where the checklists to merge are stored')
parser.add_argument('--language', dest='language', action='store', default='en',
                    help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: en)')
parser.add_argument('--excel-file', dest='excel_file', action='store',
                    help='Excel file that will be taken as template to create the XLSX file with the checklist')
parser.add_argument('--json-output-folder', dest='json_output_folder', action='store',
                    help='Folder where to store the JSON output')
parser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',
                    help='Folder where to store the macro-free Excel output')
parser.add_argument('--output-name', dest='output_name', action='store',
                    help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')
parser.add_argument('--verbose', dest='verbose', action='store_true',
                    default=False,
                    help='run in verbose mode (default: False)')
args = parser.parse_args()

# Consolidate all checklists into one big checklist object
def get_consolidated_checklist(input_folder, language):
    # Initialize checklist object
    checklist_master_data = {
        'items': [],
        'metadata': {
            'name': 'Master checklist',
            'timestamp': datetime.date.today().strftime("%B %d, %Y")
        }
    }
    # Find all files in the input folder matching the pattern "*.<language>.json"
    if args.verbose:
        print("DEBUG: looking for JSON files in folder", input_folder, "with pattern *.", language + ".json...")
    checklist_files = glob.glob(input_folder + "/*." + language + ".json")
    if args.verbose:
        print("DEBUG: found", len(checklist_files), "JSON files")
    for checklist_file in checklist_files:
        # Get JSON
        try:
            with open(checklist_file) as f:
                checklist_data = json.load(f)
                if args.verbose:
                    print("DEBUG: JSON file", checklist_file, "loaded successfully with {0} items".format(len(checklist_data["items"])))
                for item in checklist_data["items"]:
                    # Add field with the name of the checklist
                    item["checklist"] = checklist_data["metadata"]["name"]
                # Add items to the master checklist
                checklist_master_data['items'] += checklist_data['items']
                # Replace the master checklist severities and status sections (for a given language they should all be the same)
                checklist_master_data['severities'] = checklist_data['severities']
                checklist_master_data['status'] = checklist_data['status']
        except Exception as e:
            print("ERROR: Error when processing JSON file", checklist_file, "-", str(e))
    if args.verbose:
        print("DEBUG: master checklist contains", len(checklist_master_data["items"]), "items")
    return checklist_master_data

# Dump JSON object to file
def dump_json_file(json_object, filename):
    if args.verbose:
        print("DEBUG: dumping JSON object to file", filename)
    json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(json_string)

# Main function
def update_excel_file(input_excel_file, output_excel_file, checklist_data):
    # Constants
    worksheet_checklist_name = 'Checklist'
    row1 = 8  # First row after which the Excel spreadsheet will be updated
    col_checklist_name = "A"
    row_checklist_name = "4"
    guid_column_index = "L"
    comment_column_index = "G"
    sample_cell_index = 'A4'
    col_checklist = "A"
    col_area = "B"
    col_subarea = "C"
    col_check = "D"
    col_desc = "E"
    col_sev = "F"
    col_status = "G"
    col_comment = "H"
    col_link = "I"
    col_training = "J"
    col_arg_success = "K"
    col_arg_failure = "L"
    col_guid = "M"
    info_link_text = 'More info'
    training_link_text = 'Training'
    worksheet_values_name = 'Values'
    values_row1 = 2
    col_values_severity = "A"
    col_values_status = "B"
    col_values_area = "C"
    col_values_description = "H"

    # Load workbook
    try:
        wb = load_workbook(filename=input_excel_file)
        if args.verbose:
            print("DEBUG: workbook", input_excel_file, "opened successfully")
    except Exception as e:
        print("ERROR: Error when opening Excel file", input_excel_file, "-", str(e))
        sys.exit(1)

    # Get worksheet
    try:
        ws = wb[worksheet_checklist_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_checklist_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Set checklist name
    try:
        ws[col_checklist_name + row_checklist_name] = checklist_data["metadata"]["name"]
        if args.verbose:
            print("DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'".format(checklist_data["metadata"]["name"]))
    except Exception as e:
        print("ERROR: Error when setting the checklist name in worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Get default status from the JSON, default to "Not verified"
    try:
        status_list = checklist_data.get("status")
        default_status = status_list[0].get("name")
        if args.verbose:
            print("DEBUG: default status retrieved from checklist: '{0}'".format(default_status))
    except:
        default_status = "Not verified"
        if args.verbose:
            print("DEBUG: Using default status 'Not verified'")

    # For each checklist item, add a row to the spreadsheet
    row_counter = row1
    for item in checklist_data.get("items"):
        # Read variables from JSON
        checklist_name = item.get("checklist")
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        status = default_status
        graph_query_success = item.get("graph_success")
        graph_query_failure = item.get("graph_failure")
        # Update Excel
        ws[col_checklist + str(row_counter)].value = checklist_name
        ws[col_area + str(row_counter)].value = category
        ws[col_subarea + str(row_counter)].value = subcategory
        ws[col_check + str(row_counter)].value = text
        ws[col_desc + str(row_counter)].value = description
        ws[col_sev + str(row_counter)].value = severity
        ws[col_status + str(row_counter)].value = status
        ws[col_link + str(row_counter)].value = link
        # if link != None:
        #     link_elements = link.split('#')
        #     link_address = link_elements[0]
        #     if len(link_elements) > 1:
        #         link_subaddress = link_elements[1]
        #     else:
        #         link_subaddress = ""
        #     ws.api.Hyperlinks.Add(Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip="", TextToDisplay=info_link_text)
        ws[col_training + str(row_counter)].value = training
        # if training != None:
        #     training_elements = training.split('#')
        #     training_address = training_elements[0]
        #     if len(training_elements) > 1:
        #         training_subaddress = training_elements[1]
        #     else:
        #         training_subaddress = ""
        #     ws.api.Hyperlinks.Add(Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip="", TextToDisplay=training_link_text)
        # GUID and ARG queries
        ws[col_arg_success + str(row_counter)].value = graph_query_success
        ws[col_arg_failure + str(row_counter)].value = graph_query_failure
        ws[col_guid + str(row_counter)].value = guid
        # Next row
        row_counter += 1

    # Display summary (number_of_checks is computed unconditionally, since it is needed later for the data validation range)
    number_of_checks = row_counter - row1
    if args.verbose:
        print("DEBUG:", str(number_of_checks), "checks added to Excel spreadsheet")

    # Get worksheet
    try:
        wsv = wb[worksheet_values_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_values_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_values_name, "-", str(e))
        sys.exit(1)

    # Update status
    row_counter = values_row1
    for item in checklist_data.get("status"):
        status = item.get("name")
        description = item.get("description")
        wsv[col_values_status + str(row_counter)].value = status
        wsv[col_values_description + str(row_counter)].value = description
        row_counter += 1

    # Display summary
    if args.verbose:
        print("DEBUG:", str(row_counter - values_row1), "statuses added to Excel spreadsheet")

    # Update severities
    row_counter = values_row1
    for item in checklist_data.get("severities"):
        severity = item.get("name")
        wsv[col_values_severity + str(row_counter)].value = severity
        row_counter += 1

    # Display summary
    if args.verbose:
        print("DEBUG:", str(row_counter - values_row1), "severities added to Excel spreadsheet")

    # Data validation
    # dv = DataValidation(type="list", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)
    dv = DataValidation(type="list", formula1='=Values!$B$2:$B$6', allow_blank=True)
    rangevar = col_status + str(row1) + ':' + col_status + str(row1 + number_of_checks)
    if args.verbose:
        print("DEBUG: adding data validation to range", rangevar)
    dv.add(rangevar)
    ws.add_data_validation(dv)

    # Save workbook
    if args.verbose:
        print("DEBUG: saving workbook", output_excel_file)
    try:
        wb.save(output_excel_file)
    except Exception as e:
        print("ERROR: Error when saving Excel file to", output_excel_file, "-", str(e))
        sys.exit(1)

########
# Main #
########

# Build master checklist
if args.input_folder:
    # Get consolidated checklist
    checklist_master_data = get_consolidated_checklist(args.input_folder, args.language)
    # Set output file variables
    xlsx_output_file = os.path.join(args.xlsx_output_folder, args.output_name + ".xlsx")
    json_output_file = os.path.join(args.json_output_folder, args.output_name + ".json")
    # Dump master checklist to JSON file
    dump_json_file(checklist_master_data, json_output_file)
    # Update spreadsheet
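    # NOTE: the captured copy of the script ends at the "Update spreadsheet" step above.
    # The line below is an assumed completion (not present in the captured source): it would
    # reuse update_excel_file() with the template given via --excel-file and the XLSX output
    # path computed above.
    update_excel_file(args.excel_file, xlsx_output_file, checklist_master_data)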
requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef 
dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# 
checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = 
checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n 
if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n 
print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting 
worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', 
dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n 
worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', 
action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef 
update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" 
\\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given 
language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default 
status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", 
input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", 
str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', 
action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = 
\\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n status = default_status\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import 
load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n 
print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = 
item.get(\\\"training\\\")\\r\\n status = default_status\\r\\n graph_query_success = item.get(\\\"graph_success\\\")\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\",\n \"type\": \"common\"\n },\n {\n 
\"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n 
for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not 
verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n status = default_status\\r\\n graph_query_success = item.get(\\\"graph_success\\\")\\r\\n graph_query_failure = item.get(\\\"graph_failure\\\")\\r\\n # Update Excel\\r\\n ws[col_checklist + str(row_counter)].value = checklist_name\\r\\n ws[col_area + str(row_counter)].value = category\\r\\n ws[col_subarea + str(row_counter)].value = subcategory\\r\\n ws[col_check + str(row_counter)].value = text\\r\\n ws[col_desc + str(row_counter)].value = description\\r\\n ws[col_sev + str(row_counter)].value = severity\\r\\n ws[col_status + str(row_counter)].value = status\\r\\n ws[col_link + str(row_counter)].value = link\\r\\n # if link != None:\\r\\n # link_elements = link.split('#')\\r\\n # link_address = link_elements[0]\\r\\n # if len(link_elements) > 1:\\r\\n # link_subaddress = link_elements[1]\\r\\n # else:\\r\\n # link_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\\\"\\\", TextToDisplay=info_link_text)\\r\\n ws[col_training + str(row_counter)].value = training\\r\\n # if training != None:\\r\\n # training_elements = training.split('#')\\r\\n # training_address = training_elements[0]\\r\\n # if len(training_elements) > 1:\\r\\n # training_subaddress = training_elements[1]\\r\\n # else:\\r\\n # training_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\\\"\\\", TextToDisplay=training_link_text)\\r\\n # GUID and ARG queries\\r\\n ws[col_arg_success + str(row_counter)].value = graph_query_success\\r\\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\\r\\n ws[col_guid + str(row_counter)].value = guid\\r\\n # Next row\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n number_of_checks = row_counter - row1\\r\\n print(\\\"DEBUG:\\\", str(number_of_checks), \\\"checks addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n wsv = wb[worksheet_values_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_values_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_values_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Update status\\r\\n row_counter = values_row1\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of 
the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = 
checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n 
if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n status = default_status\\r\\n graph_query_success = item.get(\\\"graph_success\\\")\\r\\n graph_query_failure = item.get(\\\"graph_failure\\\")\\r\\n # Update Excel\\r\\n ws[col_checklist + str(row_counter)].value = checklist_name\\r\\n ws[col_area + str(row_counter)].value = category\\r\\n ws[col_subarea + str(row_counter)].value = subcategory\\r\\n ws[col_check + str(row_counter)].value = text\\r\\n ws[col_desc + str(row_counter)].value = description\\r\\n ws[col_sev + str(row_counter)].value = severity\\r\\n ws[col_status + str(row_counter)].value = status\\r\\n ws[col_link + str(row_counter)].value = link\\r\\n # if link != None:\\r\\n # link_elements = link.split('#')\\r\\n # link_address = link_elements[0]\\r\\n # if len(link_elements) > 1:\\r\\n # link_subaddress = link_elements[1]\\r\\n # else:\\r\\n # link_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\\\"\\\", TextToDisplay=info_link_text)\\r\\n ws[col_training + str(row_counter)].value = training\\r\\n # if training != None:\\r\\n # training_elements = training.split('#')\\r\\n # training_address = training_elements[0]\\r\\n # if len(training_elements) > 1:\\r\\n # training_subaddress = training_elements[1]\\r\\n # else:\\r\\n # training_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\\\"\\\", TextToDisplay=training_link_text)\\r\\n # GUID and ARG queries\\r\\n ws[col_arg_success + str(row_counter)].value = graph_query_success\\r\\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\\r\\n ws[col_guid + str(row_counter)].value = guid\\r\\n # Next row\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n number_of_checks = row_counter - row1\\r\\n print(\\\"DEBUG:\\\", str(number_of_checks), \\\"checks addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n wsv = wb[worksheet_values_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_values_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_values_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Update status\\r\\n row_counter = values_row1\\r\\n for item in checklist_data.get(\\\"status\\\"):\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX 
(macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as f:\\r\\n checklist_data = json.load(f)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: JSON file\\\", checklist_file, \\\"loaded successfully with {0} items\\\".format(len(checklist_data[\\\"items\\\"])))\\r\\n for item in checklist_data[\\\"items\\\"]:\\r\\n # Add field with the name of the checklist\\r\\n item[\\\"checklist\\\"] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n # Add items to the master 
checklist\\r\\n checklist_master_data['items'] += checklist_data['items']\\r\\n # Replace the master checklist severities and status sections (for a given language they should be all the same)\\r\\n checklist_master_data['severities'] = checklist_data['severities']\\r\\n checklist_master_data['status'] = checklist_data['status']\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when processing JSON file\\\", checklist_file, \\\"-\\\", str(e))\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: master checklist contains\\\", len(checklist_master_data[\\\"items\\\"]), \\\"items\\\")\\r\\n return checklist_master_data\\r\\n\\r\\n# Dump JSON object to file\\r\\ndef dump_json_file(json_object, filename):\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: dumping JSON object to file\\\", filename)\\r\\n json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))\\r\\n with open(filename, 'w', encoding='utf-8') as f:\\r\\n f.write(json_string)\\r\\n f.close()\\r\\n\\r\\n# Main function\\r\\ndef update_excel_file(input_excel_file, output_excel_file, checklist_data):\\r\\n # Constants\\r\\n worksheet_checklist_name = 'Checklist'\\r\\n row1 = 8 # First row after which the Excel spreadsheet will be updated\\r\\n col_checklist_name = \\\"A\\\"\\r\\n row_checklist_name = \\\"4\\\"\\r\\n guid_column_index = \\\"L\\\"\\r\\n comment_column_index = \\\"G\\\"\\r\\n sample_cell_index = 'A4'\\r\\n col_checklist=\\\"A\\\"\\r\\n col_area = \\\"B\\\"\\r\\n col_subarea = \\\"C\\\"\\r\\n col_check = \\\"D\\\"\\r\\n col_desc = \\\"E\\\"\\r\\n col_sev = \\\"F\\\"\\r\\n col_status = \\\"G\\\"\\r\\n col_comment = \\\"H\\\"\\r\\n col_link = \\\"I\\\"\\r\\n col_training = \\\"J\\\"\\r\\n col_arg_success = \\\"K\\\"\\r\\n col_arg_failure = \\\"L\\\"\\r\\n col_guid = \\\"M\\\"\\r\\n info_link_text = 'More info'\\r\\n training_link_text = 'Training'\\r\\n worksheet_values_name = 'Values'\\r\\n values_row1 = 2\\r\\n col_values_severity = \\\"A\\\"\\r\\n col_values_status = \\\"B\\\"\\r\\n col_values_area = \\\"C\\\"\\r\\n col_values_description = \\\"H\\\"\\r\\n\\r\\n # Load workbook\\r\\n try:\\r\\n wb = load_workbook(filename = input_excel_file)\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: workbook\\\", input_excel_file, \\\"opened successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when opening Excel file\\\", input_excel_file, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n ws = wb[worksheet_checklist_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_checklist_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Set checklist name\\r\\n try:\\r\\n ws[col_checklist_name + row_checklist_name] = checklist_data[\\\"metadata\\\"][\\\"name\\\"]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'\\\".format(checklist_data[\\\"metadata\\\"][\\\"name\\\"]))\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_checklist_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Get default status from the JSON, default to \\\"Not verified\\\"\\r\\n try:\\r\\n status_list = checklist_data.get(\\\"status\\\")\\r\\n default_status = status_list[0].get(\\\"name\\\")\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: default status retrieved from 
checklist: '{0}'\\\".format(default_status))\\r\\n except:\\r\\n default_status = \\\"Not verified\\\"\\r\\n if args.verbose:\\r\\n print (\\\"DEBUG: Using default status 'Not verified'\\\")\\r\\n pass\\r\\n\\r\\n # For each checklist item, add a row to spreadsheet\\r\\n row_counter = row1\\r\\n for item in checklist_data.get(\\\"items\\\"):\\r\\n # Read variables from JSON\\r\\n checklist_name = item.get(\\\"checklist\\\")\\r\\n guid = item.get(\\\"guid\\\")\\r\\n category = item.get(\\\"category\\\")\\r\\n subcategory = item.get(\\\"subcategory\\\")\\r\\n text = item.get(\\\"text\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n severity = item.get(\\\"severity\\\")\\r\\n link = item.get(\\\"link\\\")\\r\\n training = item.get(\\\"training\\\")\\r\\n status = default_status\\r\\n graph_query_success = item.get(\\\"graph_success\\\")\\r\\n graph_query_failure = item.get(\\\"graph_failure\\\")\\r\\n # Update Excel\\r\\n ws[col_checklist + str(row_counter)].value = checklist_name\\r\\n ws[col_area + str(row_counter)].value = category\\r\\n ws[col_subarea + str(row_counter)].value = subcategory\\r\\n ws[col_check + str(row_counter)].value = text\\r\\n ws[col_desc + str(row_counter)].value = description\\r\\n ws[col_sev + str(row_counter)].value = severity\\r\\n ws[col_status + str(row_counter)].value = status\\r\\n ws[col_link + str(row_counter)].value = link\\r\\n # if link != None:\\r\\n # link_elements = link.split('#')\\r\\n # link_address = link_elements[0]\\r\\n # if len(link_elements) > 1:\\r\\n # link_subaddress = link_elements[1]\\r\\n # else:\\r\\n # link_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip=\\\"\\\", TextToDisplay=info_link_text)\\r\\n ws[col_training + str(row_counter)].value = training\\r\\n # if training != None:\\r\\n # training_elements = training.split('#')\\r\\n # training_address = training_elements[0]\\r\\n # if len(training_elements) > 1:\\r\\n # training_subaddress = training_elements[1]\\r\\n # else:\\r\\n # training_subaddress = \\\"\\\"\\r\\n # ws.api.Hyperlinks.Add (Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip=\\\"\\\", TextToDisplay=training_link_text)\\r\\n # GUID and ARG queries\\r\\n ws[col_arg_success + str(row_counter)].value = graph_query_success\\r\\n ws[col_arg_failure + str(row_counter)].value = graph_query_failure\\r\\n ws[col_guid + str(row_counter)].value = guid\\r\\n # Next row\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n number_of_checks = row_counter - row1\\r\\n print(\\\"DEBUG:\\\", str(number_of_checks), \\\"checks addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Get worksheet\\r\\n try:\\r\\n wsv = wb[worksheet_values_name]\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: worksheet\\\", worksheet_values_name, \\\"selected successfully\\\")\\r\\n except Exception as e:\\r\\n print(\\\"ERROR: Error when selecting worksheet\\\", worksheet_values_name, \\\"-\\\", str(e))\\r\\n sys.exit(1)\\r\\n\\r\\n # Update status\\r\\n row_counter = values_row1\\r\\n for item in checklist_data.get(\\\"status\\\"):\\r\\n status = item.get(\\\"name\\\")\\r\\n description = item.get(\\\"description\\\")\\r\\n wsv[col_values_status + str(row_counter)].value = status\\r\\n wsv[col_values_description + str(row_counter)].value = description\\r\\n row_counter += 1\\r\\n\\r\\n # Display summary\\r\\n if args.verbose:\\r\\n 
print(\\\"DEBUG:\\\", str(row_counter - values_row1), \\\"statuses addedd to Excel spreadsheet\\\")\\r\\n\\r\\n # Update severities\\r\\n row_counter = values_row1\\r\",\n \"type\": \"common\"\n },\n {\n \"content\": \"######################################################################\\r\\n#\\r\\n# This script combines all of the existing checklists into one big\\r\\n# checklist, and saves it in JSON and XLSX (macrofree) formats.\\r\\n#\\r\\n# Example usage:\\r\\n# python3 ./scripts/create_master_checklist.py \\\\\\r\\n# --input-folder=\\\"./checklists\\\" \\\\\\r\\n# --language=\\\"en\\\" \\\\\\r\\n# --excel-file=\\\"./spreadsheet/macrofree/review_checklist_master_empty.xlsx\\\" \\\\\\r\\n# --output-name=\\\"checklist.en.master\\\" \\\\\\r\\n# --json-output-folder=\\\"./checklists/\\\" \\\\\\r\\n# --xlsx-output-folder=\\\"./spreadsheet/macrofree/\\\"\\r\\n# \\r\\n# Last updated: March 2022\\r\\n#\\r\\n######################################################################\\r\\n\\r\\nimport json\\r\\nimport argparse\\r\\nimport sys\\r\\nimport os\\r\\nimport requests\\r\\nimport glob\\r\\nimport datetime\\r\\nfrom openpyxl import load_workbook\\r\\nfrom openpyxl.worksheet.datavalidation import DataValidation\\r\\n\\r\\n# Get input arguments\\r\\nparser = argparse.ArgumentParser(description='Update a checklist spreadsheet with JSON-formatted Azure Resource Graph results')\\r\\nparser.add_argument('--input-folder', dest='input_folder', action='store',\\r\\n help='Input folder where the checklists to merge are stored')\\r\\nparser.add_argument('--language', dest='language', action='store', default='en',\\r\\n help='if checklist files are specified, ignore the non-English ones and only generate a spreadsheet for the English version (default: False)')\\r\\nparser.add_argument('--excel-file', dest='excel_file', action='store',\\r\\n help='You need to supply an Excel file that will be taken as template to create the XLSX file with the checklist')\\r\\nparser.add_argument('--json-output-folder', dest='json_output_folder', action='store',\\r\\n help='Folder where to store the JSON output')\\r\\nparser.add_argument('--xlsx-output-folder', dest='xlsx_output_folder', action='store',\\r\\n help='Folder where to store the macro free Excel output')\\r\\nparser.add_argument('--output-name', dest='output_name', action='store',\\r\\n help='File name (without extension) for the output files (.json and .xlsx extensions will be added automatically)')\\r\\nparser.add_argument('--verbose', dest='verbose', action='store_true',\\r\\n default=False,\\r\\n help='run in verbose mode (default: False)')\\r\\nargs = parser.parse_args()\\r\\n\\r\\n# Consolidate all checklists into one big checklist object\\r\\ndef get_consolidated_checklist(input_folder, language):\\r\\n # Initialize checklist object\\r\\n checklist_master_data = {\\r\\n 'items': [],\\r\\n 'metadata': {\\r\\n 'name': 'Master checklist',\\r\\n 'timestamp': datetime.date.today().strftime(\\\"%B %d, %Y\\\")\\r\\n }\\r\\n }\\r\\n # Find all files in the input folder matching the pattern \\\"language*.json\\\"\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: looking for JSON files in folder\\\", input_folder, \\\"with pattern *.\\\", language + \\\".json...\\\")\\r\\n checklist_files = glob.glob(input_folder + \\\"/*.\\\" + language + \\\".json\\\")\\r\\n if args.verbose:\\r\\n print(\\\"DEBUG: found\\\", len(checklist_files), \\\"JSON files\\\")\\r\\n for checklist_file in checklist_files:\\r\\n # Get JSON\\r\\n try:\\r\\n with open(checklist_file) as 
f:
                checklist_data = json.load(f)
                if args.verbose:
                    print("DEBUG: JSON file", checklist_file, "loaded successfully with {0} items".format(len(checklist_data["items"])))
                for item in checklist_data["items"]:
                    # Add field with the name of the checklist
                    item["checklist"] = checklist_data["metadata"]["name"]
                # Add items to the master checklist
                checklist_master_data['items'] += checklist_data['items']
                # Replace the master checklist severities and status sections (for a given language they should be all the same)
                checklist_master_data['severities'] = checklist_data['severities']
                checklist_master_data['status'] = checklist_data['status']
        except Exception as e:
            print("ERROR: Error when processing JSON file", checklist_file, "-", str(e))
    if args.verbose:
        print("DEBUG: master checklist contains", len(checklist_master_data["items"]), "items")
    return checklist_master_data

# Dump JSON object to file
def dump_json_file(json_object, filename):
    if args.verbose:
        print("DEBUG: dumping JSON object to file", filename)
    json_string = json.dumps(json_object, sort_keys=True, ensure_ascii=False, indent=4, separators=(',', ': '))
    with open(filename, 'w', encoding='utf-8') as f:
        f.write(json_string)

# Fill the Excel template with the contents of the consolidated checklist
def update_excel_file(input_excel_file, output_excel_file, checklist_data):
    # Constants
    worksheet_checklist_name = 'Checklist'
    row1 = 8  # First row after which the Excel spreadsheet will be updated
    col_checklist_name = "A"
    row_checklist_name = "4"
    guid_column_index = "L"
    comment_column_index = "G"
    sample_cell_index = 'A4'
    col_checklist = "A"
    col_area = "B"
    col_subarea = "C"
    col_check = "D"
    col_desc = "E"
    col_sev = "F"
    col_status = "G"
    col_comment = "H"
    col_link = "I"
    col_training = "J"
    col_arg_success = "K"
    col_arg_failure = "L"
    col_guid = "M"
    info_link_text = 'More info'
    training_link_text = 'Training'
    worksheet_values_name = 'Values'
    values_row1 = 2
    col_values_severity = "A"
    col_values_status = "B"
    col_values_area = "C"
    col_values_description = "H"

    # Load workbook
    try:
        wb = load_workbook(filename=input_excel_file)
        if args.verbose:
            print("DEBUG: workbook", input_excel_file, "opened successfully")
    except Exception as e:
        print("ERROR: Error when opening Excel file", input_excel_file, "-", str(e))
        sys.exit(1)

    # Get worksheet
    try:
        ws = wb[worksheet_checklist_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_checklist_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Set checklist name
    try:
        ws[col_checklist_name + row_checklist_name] = checklist_data["metadata"]["name"]
        if args.verbose:
            print("DEBUG: starting filling the Excel spreadsheet with the values of checklist '{0}'".format(checklist_data["metadata"]["name"]))
    except Exception as e:
        print("ERROR: Error when setting the checklist name in worksheet", worksheet_checklist_name, "-", str(e))
        sys.exit(1)

    # Get default status from the JSON, default to "Not verified"
    try:
        status_list = checklist_data.get("status")
        default_status = status_list[0].get("name")
        if args.verbose:
            print("DEBUG: default status retrieved from checklist: '{0}'".format(default_status))
    except:
        default_status = "Not verified"
        if args.verbose:
            print("DEBUG: using default status 'Not verified'")

    # For each checklist item, add a row to the spreadsheet
    row_counter = row1
    for item in checklist_data.get("items"):
        # Read variables from JSON
        checklist_name = item.get("checklist")
        guid = item.get("guid")
        category = item.get("category")
        subcategory = item.get("subcategory")
        text = item.get("text")
        description = item.get("description")
        severity = item.get("severity")
        link = item.get("link")
        training = item.get("training")
        status = default_status
        graph_query_success = item.get("graph_success")
        graph_query_failure = item.get("graph_failure")
        # Update Excel
        ws[col_checklist + str(row_counter)].value = checklist_name
        ws[col_area + str(row_counter)].value = category
        ws[col_subarea + str(row_counter)].value = subcategory
        ws[col_check + str(row_counter)].value = text
        ws[col_desc + str(row_counter)].value = description
        ws[col_sev + str(row_counter)].value = severity
        ws[col_status + str(row_counter)].value = status
        ws[col_link + str(row_counter)].value = link
        # if link != None:
        #     link_elements = link.split('#')
        #     link_address = link_elements[0]
        #     if len(link_elements) > 1:
        #         link_subaddress = link_elements[1]
        #     else:
        #         link_subaddress = ""
        #     ws.api.Hyperlinks.Add(Anchor=ws[col_link + str(row_counter)].api, Address=link_address, SubAddress=link_subaddress, ScreenTip="", TextToDisplay=info_link_text)
        ws[col_training + str(row_counter)].value = training
        # if training != None:
        #     training_elements = training.split('#')
        #     training_address = training_elements[0]
        #     if len(training_elements) > 1:
        #         training_subaddress = training_elements[1]
        #     else:
        #         training_subaddress = ""
        #     ws.api.Hyperlinks.Add(Anchor=ws[col_training + str(row_counter)].api, Address=training_address, SubAddress=training_subaddress, ScreenTip="", TextToDisplay=training_link_text)
        # GUID and ARG queries
        ws[col_arg_success + str(row_counter)].value = graph_query_success
        ws[col_arg_failure + str(row_counter)].value = graph_query_failure
        ws[col_guid + str(row_counter)].value = guid
        # Next row
        row_counter += 1

    # Display summary
    number_of_checks = row_counter - row1
    if args.verbose:
        print("DEBUG:", str(number_of_checks), "checks added to Excel spreadsheet")

    # Get worksheet
    try:
        wsv = wb[worksheet_values_name]
        if args.verbose:
            print("DEBUG: worksheet", worksheet_values_name, "selected successfully")
    except Exception as e:
        print("ERROR: Error when selecting worksheet", worksheet_values_name, "-", str(e))
        sys.exit(1)

    # Update status
    row_counter = values_row1
    for item in checklist_data.get("status"):
        status = item.get("name")
        description = item.get("description")
        wsv[col_values_status + str(row_counter)].value = status
        wsv[col_values_description + str(row_counter)].value = description
        row_counter += 1

    # Display summary
    if args.verbose:
        print("DEBUG:", str(row_counter - values_row1), "statuses added to Excel spreadsheet")

    # Update severities
    row_counter = values_row1
    for item in checklist_data.get("severities"):
        severity = item.get("name")
        wsv[col_values_severity + str(row_counter)].value = severity
        row_counter += 1

    # Display summary
    if args.verbose:
        print("DEBUG:", str(row_counter - values_row1), "severities added to Excel spreadsheet")

    # Data validation: drop-down for the Status column, sourced from the Values worksheet
    # dv = DataValidation(type="list", formula1='=Values!$B$2:$B$6', allow_blank=True, showDropDown=True)
    dv = DataValidation(type="list", formula1='=Values!$B$2:$B$6', allow_blank=True)
    rangevar = col_status + str(row1) + ':' + col_status + str(row1 + number_of_checks)
    if args.verbose:
        print("DEBUG: adding data validation to range", rangevar)
    dv.add(rangevar)
    ws.add_data_validation(dv)
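The dump breaks off at the end of update_excel_file, just before the workbook is written to disk, and the driver code at the bottom of the script is missing entirely. The sketch below is a plausible reconstruction, not a verbatim copy of the original: the verbose logging and error handling follow the pattern used throughout the script, and the output file names are an assumption based on the --output-name, --json-output-folder and --xlsx-output-folder arguments (whose help text says the .json and .xlsx extensions are added automatically).

    # Close book
    if args.verbose:
        print("DEBUG: saving workbook", output_excel_file)
    try:
        wb.save(output_excel_file)
    except Exception as e:
        print("ERROR: Error when saving Excel file to", output_excel_file, "-", str(e))
        sys.exit(1)

######################################################################
# Main (reconstructed driver code; see note above)
######################################################################

# Build the output file names (assumed naming scheme: output name plus automatic extension)
json_output_file = os.path.join(args.json_output_folder, args.output_name + ".json")
xlsx_output_file = os.path.join(args.xlsx_output_folder, args.output_name + ".xlsx")

# Consolidate all per-technology checklists into a single master checklist
checklist_master_data = get_consolidated_checklist(args.input_folder, args.language)
# Dump the master checklist to JSON
dump_json_file(checklist_master_data, json_output_file)
# Fill the macro-free Excel template with the master checklist
update_excel_file(args.excel_file, xlsx_output_file, checklist_master_data)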
qiboteam__qibocal METASEP

doc/source/conf.py METASEP
# -*- coding: utf-8 -*-
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

from recommonmark.transform import AutoStructify

sys.path.insert(0, os.path.abspath(".."))

import qcvv

# -- Project information -----------------------------------------------------

project = "qcvv"
copyright = "2022, The Qibo team"
author = "The Qibo team"

# The full version, including alpha/beta/rc tags
release = qcvv.__version__


# -- General configuration ---------------------------------------------------

# https://stackoverflow.com/questions/56336234/build-fail-sphinx-error-contents-rst-not-found
master_doc = "index"

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.coverage",
    "sphinx.ext.napoleon",
    "sphinx.ext.intersphinx",
    "recommonmark",
    "sphinx_markdown_tables",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# Markdown configuration

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = {".rst": "restructuredtext", ".txt": "markdown", ".md": "markdown"}

autosectionlabel_prefix_document = True
# Allow to embed rst syntax in markdown files.
enable_eval_rst = True


# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = []\n\n# -- Intersphinx -------------------------------------------------------------\n\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\n\n# -- Autodoc ------------------------------------------------------------------\n#\nautodoc_member_order = \"bysource\"\n\n# Adapted this from\n# https://github.com/readthedocs/recommonmark/blob/ddd56e7717e9745f11300059e4268e204138a6b1/docs/conf.py\n# app setup hook\ndef setup(app):\n app.add_config_value(\"recommonmark_config\", {\"enable_eval_rst\": True}, True)\n app.add_transform(AutoStructify)\n\nserverscripts/qcvv-update-on-change.py METASEP\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport argparse\n\nimport curio\nimport inotify.adapters\nimport inotify.constants\nfrom curio import subprocess\n\n\nasync def main(folder, exe_args):\n i = inotify.adapters.Inotify()\n i.add_watch(folder)\n\n for event in i.event_gen(yield_nones=False):\n if event is not None:\n (header, _, _, _) = event\n if (\n (header.mask & inotify.constants.IN_CREATE)\n or (header.mask & inotify.constants.IN_DELETE)\n or (header.mask & inotify.constants.IN_MODIFY)\n ):\n await subprocess.run(exe_args)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"folder\")\n parser.add_argument(\"exe_args\", nargs=\"+\")\n args = parser.parse_args()\n curio.run(main(args.folder, args.exe_args))\n\nserverscripts/qcvv-index.reports.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"qcvv-index-reports.py\nGenerates a JSON index with reports information.\n\"\"\"\nimport json\nimport pathlib\nimport sys\nfrom collections import ChainMap\n\nimport yaml\n\nROOT = \"/home/users/qcvv/qcvv-reports\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\nOUT = \"/home/users/qcvv/qcvv-reports/index.json\"\nDEFAULTS = {\n \"title\": \"-\",\n \"date\": \"-\",\n \"platform\": \"-\",\n \"start-time\": \"-\",\n \"end-time\": \"-\",\n}\nREQUIRED_FILE_METADATA = {\"title\", \"date\", \"platform\", \"start-time\" \"end-time\"}\n\n\ndef meta_from_path(p):\n meta = ChainMap(DEFAULTS)\n yaml_meta = p / \"meta.yml\"\n yaml_res = {}\n if yaml_meta.exists():\n with yaml_meta.open() as f:\n try:\n yaml_res = yaml.safe_load(f)\n except yaml.YAMLError as e:\n print(f\"Error processing {yaml_meta}: {e}\", file=sys.stderr)\n meta = meta.new_child(yaml_res)\n return meta\n\n\ndef register(p):\n path_meta = meta_from_path(p)\n title, date, platform, start_time, end_time = (\n path_meta[\"title\"],\n path_meta[\"date\"],\n path_meta[\"platform\"],\n path_meta[\"start-time\"],\n path_meta[\"end-time\"],\n )\n url = ROOT_URL + p.name\n titlelink = f'{title}'\n return (titlelink, date, platform, start_time, end_time)\n\n\ndef make_index():\n root_path = pathlib.Path(ROOT)\n data = []\n for p in root_path.iterdir():\n if p.is_dir():\n try:\n res = register(p)\n data.append(res)\n except:\n print(\"Error processing folder\", p, file=sys.stderr)\n raise\n\n with open(OUT, \"w\") as f:\n json.dump({\"data\": data}, f)\n\n\nif __name__ == \"__main__\":\n make_index()\n\nsrc/qcvv/web/server.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nimport yaml\nfrom flask import Flask, render_template\n\nfrom qcvv import __version__\nfrom qcvv.cli.builders import ReportBuilder\n\nserver = Flask(__name__)\n\n\n@server.route(\"/\")\n@server.route(\"/data/\")\ndef page(path=None):\n folders = [\n folder\n for folder in 
reversed(sorted(os.listdir(os.getcwd())))\n if os.path.isdir(folder) and \"meta.yml\" in os.listdir(folder)\n ]\n\n report = None\n if path is not None:\n try:\n report = ReportBuilder(path)\n except (FileNotFoundError, TypeError):\n pass\n\n return render_template(\n \"template.html\",\n version=__version__,\n folders=folders,\n report=report,\n )\n\nsrc/qcvv/web/report.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nfrom jinja2 import Environment, FileSystemLoader\n\nfrom qcvv import __version__\nfrom qcvv.cli.builders import ReportBuilder\n\n\ndef create_report(path):\n \"\"\"Creates an HTML report for the data in the given path.\"\"\"\n filepath = pathlib.Path(__file__)\n\n with open(os.path.join(filepath.with_name(\"static\"), \"styles.css\"), \"r\") as file:\n css_styles = f\"\"\n\n report = ReportBuilder(path)\n env = Environment(loader=FileSystemLoader(filepath.with_name(\"templates\")))\n template = env.get_template(\"template.html\")\n\n html = template.render(\n is_static=True,\n css_styles=css_styles,\n version=__version__,\n report=report,\n )\n\n with open(os.path.join(path, \"index.html\"), \"w\") as file:\n file.write(html)\n\nsrc/qcvv/web/app.py METASEP\n# -*- coding: utf-8 -*-\nimport os\n\nimport pandas as pd\nimport yaml\nfrom dash import Dash, Input, Output, dcc, html\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.web.server import server\n\nDataset() # dummy dataset call to suppress ``pint[V]`` error\n\napp = Dash(\n server=server,\n suppress_callback_exceptions=True,\n)\n\napp.layout = html.Div(\n [\n dcc.Location(id=\"url\", refresh=False),\n dcc.Graph(id=\"graph\", figure={}),\n dcc.Interval(\n id=\"interval\",\n # TODO: Perhaps the user should be allowed to change the refresh rate\n interval=1000,\n n_intervals=0,\n disabled=False,\n ),\n ]\n)\n\n\n@app.callback(\n Output(\"graph\", \"figure\"),\n Input(\"interval\", \"n_intervals\"),\n Input(\"graph\", \"figure\"),\n Input(\"url\", \"pathname\"),\n)\ndef get_graph(n, current_figure, url):\n method, folder, routine, qubit, format = url.split(os.sep)[2:]\n try:\n # data = Dataset.load_data(folder, routine, format, \"precision_sweep\")\n # with open(f\"{folder}/platform.yml\", \"r\") as f:\n # nqubits = yaml.safe_load(f)[\"nqubits\"]\n # if len(data) > 2:\n # params, fit = resonator_spectroscopy_fit(folder, format, nqubits)\n # else:\n # params, fit = None, None\n # return getattr(plots.resonator_spectroscopy, method)(data, params, fit)\n\n # # FIXME: Temporarily hardcode the plotting method to test\n # # multiple routines with different names in one folder\n # # should be changed to:\n # # return getattr(getattr(plots, routine), method)(data)\n\n return getattr(plots, method)(folder, routine, qubit, format)\n except (FileNotFoundError, pd.errors.EmptyDataError):\n return current_figure\n\nsrc/qcvv/web/__init__.py METASEP\n\nsrc/qcvv/tests/test_data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Some tests for the Dataset class\"\"\"\nimport tempfile\n\nimport numpy as np\nimport pytest\nfrom pint import DimensionalityError, UndefinedUnitError\n\nfrom qcvv.data import Dataset\n\n\ndef random_dataset(length):\n data = Dataset()\n for _ in range(length):\n msr, i, q, phase = np.random.rand(len(data.df.columns))\n data.add({\"MSR[V]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n return data\n\n\ndef test_data_initialization():\n \"\"\"Test Dataset constructor\"\"\"\n data = Dataset()\n assert len(data.df.columns) == 4\n assert list(data.df.columns) == [\"MSR\", \"i\", 
\"q\", \"phase\"]\n\n data1 = Dataset(quantities={\"attenuation\": \"dB\"})\n assert len(data1.df.columns) == 5\n assert list(data1.df.columns) == [\"attenuation\", \"MSR\", \"i\", \"q\", \"phase\"]\n\n\ndef test_units():\n \"\"\"Test units of measure in Dataset\"\"\"\n data = Dataset()\n assert data.df.MSR.values.units == \"volt\"\n\n data1 = Dataset(quantities={\"frequency\": \"Hz\"})\n assert data1.df.frequency.values.units == \"hertz\"\n\n with pytest.raises(UndefinedUnitError):\n data2 = Dataset(quantities={\"fake_unit\": \"fake\"})\n\n\ndef test_add():\n \"\"\"Test add method of Dataset\"\"\"\n data = random_dataset(5)\n assert len(data) == 5\n\n data1 = Dataset(quantities={\"attenuation\": \"dB\"})\n msr, i, q, phase, att = np.random.rand(len(data1.df.columns))\n data1.add(\n {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"attenuation[dB]\": att,\n }\n )\n assert len(data1) == 1\n\n data1.add(\n {\n \"MSR[V]\": 0,\n \"i[V]\": 0.0,\n \"q[V]\": 0.0,\n \"phase[deg]\": 0,\n \"attenuation[dB]\": 1,\n }\n )\n assert len(data1) == 2\n\n data2 = Dataset()\n msr, i, q, phase = np.random.rand(len(data2.df.columns))\n with pytest.raises(DimensionalityError):\n data2.add({\"MSR[dB]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n with pytest.raises(UndefinedUnitError):\n data2.add({\"MSR[test]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\nsrc/qcvv/plots/scatters.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qcvv.data import Data, Dataset\nfrom qcvv.fitting.utils import exp, flipping, lorenzian, rabi, ramsey\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = Dataset.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = Dataset(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = Dataset.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = Dataset(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n 
data_fit.df[\"fit_amplitude\"][0],\n data_fit.df[\"fit_center\"][0],\n data_fit.df[\"fit_sigma\"][0],\n data_fit.df[\"fit_offset\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\n# For Rabi oscillations\ndef time_msr_phase(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"Time\": \"ns\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"Time\", \"ns\")),\n max(data.get_values(\"Time\", \"ns\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n # add annotation for label[0] -> pi_pulse_duration\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[0]} is 
{data_fit.df[params[0]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n # add annotation for label[0] -> rabi_oscillations_pi_pulse_max_voltage\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # add annotation for label[0] -> rabi_oscillations_pi_pulse_max_voltage\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Time (ns)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef gain_msr_phase(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"gain\", \"dimensionless\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gain\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gain\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"gain\", \"dimensionless\")),\n max(data.get_values(\"gain\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n # add annotation for label[0] -> pi_pulse_gain\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.1f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gain (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\ndef amplitude_msr_phase(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"amplitude\", \"dimensionless\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = 
Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"amplitude\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"amplitude\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"amplitude\", \"dimensionless\")),\n max(data.get_values(\"amplitude\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n # add annotation for label[0] -> pi_pulse_gain\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.1f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Amplitude (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\n# For Ramsey oscillations\ndef time_msr(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\"MSR (V)\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"wait\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Ramsey\",\n ),\n row=1,\n col=1,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"wait\", \"ns\")),\n max(data.get_values(\"wait\", \"ns\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=ramsey(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns\",\n 
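                # xanchor/xref/yref below pin this label to figure ("paper") coordinates so it renders beneath the axes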
textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.3f} Hz\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\n# T1\ndef t1_time_msr_phase(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"Time\": \"ns\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"T1\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"T1\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"Time\", \"ns\")),\n max(data.get_values(\"Time\", \"ns\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=exp(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Time (ns)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\n# Flipping\ndef flips_msr_phase(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"flips\": \"dimensionless\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"flips\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"T1\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"flips\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"T1\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"flips\", \"dimensionless\")),\n max(data.get_values(\"flips\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=flipping(\n timerange,\n data_fit.df[\"popt0\"][0],\n 
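                    # popt0..popt3 are the fitted amplitude, offset, oscillation period and phase expected by qcvv.fitting.utils.flipping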
data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.4f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Flips (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Flips (dimensionless)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\n# For calibrate qubit states\ndef exc_gnd(folder, routine, qubit, format):\n\n import os.path\n\n file_exc = f\"{folder}/data/{routine}/data_exc_q{qubit}.csv\"\n if os.path.exists(file_exc):\n data_exc = Dataset.load_data(folder, routine, format, f\"data_exc_q{qubit}\")\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\"Calibrate qubit states\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_exc.get_values(\"i\", \"V\"),\n y=data_exc.get_values(\"q\", \"V\"),\n name=\"exc_state\",\n mode=\"markers\",\n marker=dict(size=3, color=\"lightcoral\"),\n ),\n row=1,\n col=1,\n )\n\n file_gnd = f\"{folder}/data/{routine}/data_gnd_q{qubit}.csv\"\n if os.path.exists(file_gnd):\n data_gnd = Dataset.load_data(folder, routine, format, f\"data_gnd_q{qubit}\")\n\n fig.add_trace(\n go.Scatter(\n x=data_gnd.get_values(\"i\", \"V\"),\n y=data_gnd.get_values(\"q\", \"V\"),\n name=\"gnd state\",\n mode=\"markers\",\n marker=dict(size=3, color=\"skyblue\"),\n ),\n row=1,\n col=1,\n )\n\n file_exc = f\"{folder}/data/{routine}/data_exc_q{qubit}.csv\"\n if os.path.exists(file_exc):\n i_exc = data_exc.get_values(\"i\", \"V\")\n q_exc = data_exc.get_values(\"q\", \"V\")\n\n i_mean_exc = i_exc.mean()\n q_mean_exc = q_exc.mean()\n iq_mean_exc = complex(i_mean_exc, q_mean_exc)\n mod_iq_exc = abs(iq_mean_exc) * 1e6\n\n fig.add_trace(\n go.Scatter(\n x=[i_mean_exc],\n y=[q_mean_exc],\n name=f\" state1_voltage: {mod_iq_exc}
mean_exc_state: {iq_mean_exc}\",\n mode=\"markers\",\n marker=dict(size=10, color=\"red\"),\n ),\n row=1,\n col=1,\n )\n\n file_gnd = f\"{folder}/data/{routine}/data_gnd_q{qubit}.csv\"\n if os.path.exists(file_gnd):\n i_gnd = data_gnd.get_values(\"i\", \"V\")\n q_gnd = data_gnd.get_values(\"q\", \"V\")\n\n i_mean_gnd = i_gnd.mean()\n q_mean_gnd = q_gnd.mean()\n iq_mean_gnd = complex(i_mean_gnd, q_mean_gnd)\n mod_iq_gnd = abs(iq_mean_gnd) * 1e6\n\n fig.add_trace(\n go.Scatter(\n x=[i_mean_gnd],\n y=[q_mean_gnd],\n name=f\" state0_voltage: {mod_iq_gnd}
mean_gnd_state: {iq_mean_gnd}\",\n mode=\"markers\",\n marker=dict(size=10, color=\"blue\"),\n ),\n row=1,\n col=1,\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"i (V)\",\n yaxis_title=\"q (V)\",\n width=1000\n )\n\n return fig\nsrc/qcvv/plots/heatmaps.py METASEP\n# -*- coding: utf-8 -*-\nimport os.path\n\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qcvv.data import Dataset\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n 
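    # one shared layout for the grid of per-flux subplots; axis titles come from x_title/y_title in make_subplots above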
fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef duration_gain_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"gain\", \"dimensionless\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"gain\", \"dimensionless\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"duration (ns)\",\n yaxis_title=\"gain (dimensionless)\",\n xaxis2_title=\"duration (ns)\",\n yaxis2_title=\"gain (dimensionless)\",\n )\n return fig\n\n\ndef duration_amplitude_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"amplitude\", \"dimensionless\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"amplitude\", \"dimensionless\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"duration (ns)\",\n yaxis_title=\"amplitude (dimensionless)\",\n xaxis2_title=\"duration (ns)\",\n yaxis2_title=\"amplitude (dimensionless)\",\n )\n return fig\n\nsrc/qcvv/plots/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qcvv.plots.heatmaps import *\nfrom qcvv.plots.scatters import *\n\nsrc/qcvv/fitting/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport re\n\nimport numpy as np\n\n\ndef lorenzian(frequency, amplitude, center, sigma, offset):\n # http://openafox.com/science/peak-function-derivations.html\n return (amplitude / np.pi) * (\n sigma / ((frequency - center) ** 2 + sigma**2)\n ) + offset\n\n\ndef rabi(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # Period T : 1/p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n # return p[0] + p[1] * np.sin(2 * np.pi / p[2] * x + p[3]) * np.exp(-x / p[4])\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef ramsey(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # DeltaFreq : p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n # return p[0] + p[1] * np.sin(2 * np.pi / p[2] * x + p[3]) * np.exp(-x / p[4])\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef exp(x, *p):\n return p[0] - p[1] * np.exp(-1 * x * p[2])\n\n\ndef flipping(x, p0, p1, p2, p3):\n # A fit to Flipping Qubit oscillation\n # Epsilon?? 
shoule be Amplitude : p[0]\n # Offset : p[1]\n # Period of oscillation : p[2]\n # phase for the first point corresponding to pi/2 rotation : p[3]\n return np.sin(x * 2 * np.pi / p2 + p3) * p0 + p1\n # return p0 * np.sin(p3 + (2 * np.pi * x) / p2) + p1\n\n\ndef parse(key):\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n return name, unit\n\nsrc/qcvv/fitting/methods.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qcvv.config import log\nfrom qcvv.data import Data\nfrom qcvv.fitting.utils import exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = 
data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. 
N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\nsrc/qcvv/fitting/__init__.py METASEP\n\nsrc/qcvv/cli/builders.py METASEP\n# -*- coding: utf-8 -*-\nimport datetime\nimport inspect\nimport os\nimport shutil\n\nimport yaml\n\nfrom qcvv import calibrations\nfrom qcvv.config import log, raise_error\nfrom qcvv.data import Data\n\n\ndef load_yaml(path):\n \"\"\"Load yaml file from disk.\"\"\"\n with open(path, \"r\") as file:\n data = yaml.safe_load(file)\n return data\n\n\nclass ActionBuilder:\n \"\"\"Class for parsing and executing runcards.\n\n Args:\n runcard (path): path containing the runcard.\n folder (path): path for the output folder.\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n\n def __init__(self, runcard, folder=None, force=False):\n path, self.folder = self._generate_output_folder(folder, force)\n self.runcard = load_yaml(runcard)\n platform_name = self.runcard[\"platform\"]\n self._allocate_platform(platform_name)\n self.qubits = self.runcard[\"qubits\"]\n self.format = self.runcard[\"format\"]\n\n # Saving runcard\n self.save_runcards(path, runcard)\n self.save_meta(path, self.folder, platform_name)\n\n @staticmethod\n def _generate_output_folder(folder, force):\n \"\"\"Static method for generating the output folder.\n\n Args:\n folder (path): path for the output folder. 
If None it will be created a folder automatically\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n if folder is None:\n import getpass\n\n e = datetime.datetime.now()\n user = getpass.getuser().replace(\".\", \"-\")\n date = e.strftime(\"%Y-%m-%d\")\n folder = f\"{date}-{'000'}-{user}\"\n num = 0\n while os.path.exists(folder):\n log.warning(f\"Directory {folder} already exists.\")\n num += 1\n folder = f\"{date}-{str(num).rjust(3, '0')}-{user}\"\n log.warning(f\"Trying to create directory {folder}\")\n elif os.path.exists(folder) and not force:\n raise_error(RuntimeError, f\"Directory {folder} already exists.\")\n elif os.path.exists(folder) and force:\n log.warning(f\"Deleting previous directory {folder}.\")\n shutil.rmtree(os.path.join(os.getcwd(), folder))\n\n path = os.path.join(os.getcwd(), folder)\n log.info(f\"Creating directory {folder}.\")\n os.makedirs(path)\n return path, folder\n\n def _allocate_platform(self, platform_name):\n \"\"\"Allocate the platform using Qibolab.\"\"\"\n from qibo.backends import construct_backend\n\n self.platform = construct_backend(\"qibolab\", platform=platform_name).platform\n\n def save_runcards(self, path, runcard):\n \"\"\"Save the output runcards.\"\"\"\n from qibolab.paths import qibolab_folder\n\n platform_runcard = (\n qibolab_folder / \"runcards\" / f\"{self.runcard['platform']}.yml\"\n )\n shutil.copy(platform_runcard, f\"{path}/platform.yml\")\n shutil.copy(runcard, f\"{path}/runcard.yml\")\n\n def save_meta(self, path, folder, platform_name):\n import qibo\n import qibolab\n\n import qcvv\n\n e = datetime.datetime.now(datetime.timezone.utc)\n meta = {}\n meta[\"title\"] = folder\n meta[\"platform\"] = platform_name\n meta[\"date\"] = e.strftime(\"%Y-%m-%d\")\n meta[\"start-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"versions\"] = {\n \"qibo\": qibo.__version__,\n \"qibolab\": qibolab.__version__,\n \"qcvv\": qcvv.__version__,\n }\n with open(f\"{path}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n def _build_single_action(self, name):\n \"\"\"Helper method to parse the actions in the runcard.\"\"\"\n f = getattr(calibrations, name)\n path = os.path.join(self.folder, f\"data/{name}/\")\n os.makedirs(path)\n sig = inspect.signature(f)\n params = self.runcard[\"actions\"][name]\n for param in list(sig.parameters)[2:-1]:\n if param not in params:\n raise_error(AttributeError, f\"Missing parameter {param} in runcard.\")\n return f, params, path\n\n def execute(self):\n \"\"\"Method to execute sequentially all the actions in the runcard.\"\"\"\n self.platform.connect()\n self.platform.setup()\n self.platform.start()\n for action in self.runcard[\"actions\"]:\n routine, args, path = self._build_single_action(action)\n self._execute_single_action(routine, args, path)\n self.platform.stop()\n self.platform.disconnect()\n\n def _execute_single_action(self, routine, arguments, path):\n \"\"\"Method to execute a single action and retrieving the results.\"\"\"\n for qubit in self.qubits:\n results = routine(self.platform, qubit, **arguments)\n if self.format is None:\n raise_error(\n ValueError, f\"Cannot store data using {self.format} format.\"\n )\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n self.update_platform_runcard(qubit, routine.__name__)\n\n def update_platform_runcard(self, qubit, routine):\n\n try:\n data_fit = Data.load_data(\n self.folder, routine, self.format, f\"fit_q{qubit}\"\n )\n except:\n data_fit = Data()\n\n 
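        # columns whose name does not contain "fit" are the physical quantities to write back into the platform runcard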
params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n settings = load_yaml(f\"{self.folder}/platform.yml\")\n\n for param in params:\n settings[\"characterization\"][\"single_qubit\"][qubit][param] = int(\n data_fit.df[param][0]\n )\n\n with open(f\"{self.folder}/data/{routine}/platform.yml\", \"a+\") as file:\n yaml.dump(\n settings, file, sort_keys=False, indent=4, default_flow_style=None\n )\n\n def dump_report(self):\n from qcvv.web.report import create_report\n\n # update end time\n meta = load_yaml(f\"{self.folder}/meta.yml\")\n e = datetime.datetime.now(datetime.timezone.utc)\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n with open(f\"{self.folder}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n create_report(self.folder)\n\n\nclass ReportBuilder:\n \"\"\"Parses routines and plots to report and live plotting page.\n\n Args:\n path (str): Path to the data folder to generate report for.\n \"\"\"\n\n def __init__(self, path):\n self.path = path\n self.metadata = load_yaml(os.path.join(path, \"meta.yml\"))\n\n # find proper path title\n base, self.title = os.path.join(os.getcwd(), path), \"\"\n while self.title in (\"\", \".\"):\n base, self.title = os.path.split(base)\n\n self.runcard = load_yaml(os.path.join(path, \"runcard.yml\"))\n self.format = self.runcard.get(\"format\")\n self.qubits = self.runcard.get(\"qubits\")\n\n # create calibration routine objects\n # (could be incorporated to :meth:`qcvv.cli.builders.ActionBuilder._build_single_action`)\n self.routines = []\n for action in self.runcard.get(\"actions\"):\n routine = getattr(calibrations, action)\n if not hasattr(routine, \"plots\"):\n routine.plots = []\n self.routines.append(routine)\n\n def get_routine_name(self, routine):\n \"\"\"Prettify routine's name for report headers.\"\"\"\n return routine.__name__.replace(\"_\", \" \").title()\n\n def get_figure(self, routine, method, qubit):\n \"\"\"Get html figure for report.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n import tempfile\n\n figure = method(self.path, routine.__name__, qubit, self.format)\n with tempfile.NamedTemporaryFile() as temp:\n figure.write_html(temp.name, include_plotlyjs=False, full_html=False)\n fightml = temp.read().decode(\"utf-8\")\n return fightml\n\n def get_live_figure(self, routine, method, qubit):\n \"\"\"Get url to dash page for live plotting.\n\n This url is used by :meth:`qcvv.web.app.get_graph`.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n return os.path.join(\n method.__name__,\n self.path,\n routine.__name__,\n str(qubit),\n self.format,\n )\n\nsrc/qcvv/cli/_base.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Adds global CLI options.\"\"\"\nimport base64\nimport pathlib\nimport shutil\nimport socket\nimport subprocess\nimport uuid\nfrom urllib.parse import urljoin\n\nimport click\nfrom qibo.config import log, raise_error\n\nfrom qcvv.cli.builders import ActionBuilder\n\nCONTEXT_SETTINGS = dict(help_option_names=[\"-h\", \"--help\"])\n\n# options for report upload\nUPLOAD_HOST = (\n \"qcvv@localhost\"\n if socket.gethostname() == \"saadiyat\"\n else \"qcvv@login.qrccluster.com\"\n)\nTARGET_DIR = \"qcvv-reports/\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"runcard\", metavar=\"RUNCARD\", type=click.Path(exists=True))\n@click.option(\n \"folder\",\n \"-o\",\n type=click.Path(),\n 
help=\"Output folder. If not provided a standard name will generated.\",\n)\n@click.option(\n \"force\",\n \"-f\",\n is_flag=True,\n help=\"Use --force option to overwrite the output folder.\",\n)\ndef command(runcard, folder, force=None):\n\n \"\"\"qcvv: Quantum Calibration Verification and Validation using Qibo.\n\n Arguments:\n\n - RUNCARD: runcard with declarative inputs.\n \"\"\"\n\n action_builder = ActionBuilder(runcard, folder, force)\n action_builder.execute()\n action_builder.dump_report()\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.option(\n \"port\",\n \"-p\",\n \"--port\",\n default=8050,\n type=int,\n help=\"Localhost port to launch dash server.\",\n)\n@click.option(\n \"debug\",\n \"-d\",\n \"--debug\",\n is_flag=True,\n help=\"Launch server in debugging mode.\",\n)\ndef live_plot(port, debug):\n \"\"\"Real time plotting of calibration data on a dash server.\"\"\"\n import socket\n\n from qcvv.web.app import app\n\n # change port if it is already used\n while True:\n with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n if s.connect_ex((\"localhost\", port)) != 0:\n break\n port += 1\n\n app.run_server(debug=debug, port=port)\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"output_folder\", metavar=\"FOLDER\", type=click.Path(exists=True))\ndef upload(output_folder):\n \"\"\"Uploads output folder to server\"\"\"\n\n output_path = pathlib.Path(output_folder)\n\n # check the rsync command exists.\n if not shutil.which(\"rsync\"):\n raise_error(\n RuntimeError,\n \"Could not find the rsync command. Please make sure it is installed.\",\n )\n\n # check that we can authentica with a certificate\n ssh_command_line = (\n \"ssh\",\n \"-o\",\n \"PreferredAuthentications=publickey\",\n \"-q\",\n UPLOAD_HOST,\n \"exit\",\n )\n\n str_line = \" \".join(repr(ele) for ele in ssh_command_line)\n\n log.info(f\"Checking SSH connection to {UPLOAD_HOST}.\")\n\n try:\n subprocess.run(ssh_command_line, check=True)\n except subprocess.CalledProcessError as e:\n raise RuntimeError(\n (\n \"Could not validate the SSH key. \"\n \"The command\\n%s\\nreturned a non zero exit status. \"\n \"Please make sure that your public SSH key is on the server.\"\n )\n % str_line\n ) from e\n except OSError as e:\n raise RuntimeError(\n \"Could not run the command\\n{}\\n: {}\".format(str_line, e)\n ) from e\n\n log.info(\"Connection seems OK.\")\n\n # upload output\n randname = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode()\n newdir = TARGET_DIR + randname\n\n rsync_command = (\n \"rsync\",\n \"-aLz\",\n \"--chmod=ug=rwx,o=rx\",\n f\"{output_path}/\",\n f\"{UPLOAD_HOST}:{newdir}\",\n )\n\n log.info(f\"Uploading output ({output_path}) to {UPLOAD_HOST}\")\n try:\n subprocess.run(rsync_command, check=True)\n except subprocess.CalledProcessError as e:\n msg = f\"Failed to upload output: {e}\"\n raise RuntimeError(msg) from e\n\n url = urljoin(ROOT_URL, randname)\n log.info(f\"Upload completed. 
The result is available at:\\n{url}\")\n\nsrc/qcvv/cli/__init__.py METASEP\n# -*- coding: utf-8 -*-\n\n\"\"\"CLI entry point.\"\"\"\nfrom ._base import command, live_plot, upload\n\nsrc/qcvv/decorators.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Decorators implementation.\"\"\"\nimport os\n\nfrom qcvv.config import raise_error\n\n\ndef plot(header, method):\n \"\"\"Decorator for adding plots in the report and live plotting page.\n\n Args:\n header (str): Header of the plot to use in the report.\n method (Callable): Plotting method defined under ``qcvv.plots``.\n \"\"\"\n\n def wrapped(f):\n if hasattr(f, \"plots\"):\n # insert in the beginning of the list to have\n # proper plot ordering in the report\n f.plots.insert(0, (header, method))\n else:\n f.plots = [(header, method)]\n return f\n\n return wrapped\n\nsrc/qcvv/data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Implementation of Dataset class to store measurements.\"\"\"\n\nimport re\nfrom abc import abstractmethod\n\nimport pandas as pd\nimport pint_pandas\n\nfrom qcvv.config import raise_error\n\n\nclass AbstractDataset:\n def __init__(self, name=None):\n\n if name is None:\n self.name = \"data\"\n else:\n self.name = name\n\n self.df = pd.DataFrame()\n\n def __add__(self, data):\n self.df = pd.concat([self.df, data.df], ignore_index=True)\n return self\n\n @abstractmethod\n def add(self, data):\n raise_error(NotImplementedError)\n\n def __len__(self):\n \"\"\"Computes the length of the dataset.\"\"\"\n return len(self.df)\n\n @abstractmethod\n def load_data(cls, folder, routine, format, name):\n raise_error(NotImplementedError)\n\n @abstractmethod\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n if self.quantities == None:\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n else:\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\n\nclass Dataset(AbstractDataset):\n \"\"\"Class to store the data measured during the calibration routines.\n It is a wrapper to a pandas DataFrame with units of measure from the Pint\n library.\n\n Args:\n quantities (dict): dictionary containing additional quantities that the user\n may save other than the pulse sequence output. 
The keys are the name of the\n quantities and the corresponding values are the units of measure.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n self.df = pd.DataFrame(\n {\n \"MSR\": pd.Series(dtype=\"pint[V]\"),\n \"i\": pd.Series(dtype=\"pint[V]\"),\n \"q\": pd.Series(dtype=\"pint[V]\"),\n \"phase\": pd.Series(dtype=\"pint[deg]\"),\n }\n )\n self.quantities = {\"MSR\": \"V\", \"i\": \"V\", \"q\": \"V\", \"phase\": \"deg\"}\n\n if quantities is not None:\n self.quantities.update(quantities)\n for name, unit in quantities.items():\n self.df.insert(0, name, pd.Series(dtype=f\"pint[{unit}]\"))\n\n from pint import UnitRegistry\n\n self.ureg = UnitRegistry()\n\n def add(self, data):\n \"\"\"Add a row to dataset.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n # TODO: find a better way to do this\n self.df.loc[l, name] = value * self.ureg(unit)\n\n def get_values(self, quantity, unit):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n unit (str): Unit of the returned values.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity].pint.to(unit).pint.magnitude\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. Possible choices are 'csv' and 'pickle'.\n\n Returns:\n dataset (``Dataset``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file, header=[0, 1])\n obj.df = obj.df.pint.quantify(level=-1)\n obj.df.pop(\"Unnamed: 0_level_0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n\nclass Data(AbstractDataset):\n \"\"\"Class to store the data obtained from calibration routines.\n It is a wrapper to a pandas DataFrame.\n\n Args:\n quantities (dict): dictionary quantities to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n if quantities is not None:\n self.quantities = quantities\n for name in quantities:\n self.df.insert(0, name, pd.Series(dtype=object))\n\n def add(self, data):\n \"\"\"Add a row to dataset.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n self.df.loc[l, key] = value\n\n def get_values(self, quantity):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity]\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n 
Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. Possible choices are 'csv' and 'pickle'.\n\n Returns:\n dataset (``Dataset``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file)\n obj.df.pop(\"Unnamed: 0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\nsrc/qcvv/config.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Custom logger implemenation.\"\"\"\nimport logging\nimport os\n\n# Logging level from 0 (all) to 4 (errors) (see https://docs.python.org/3/library/logging.html#logging-levels)\nQCVV_LOG_LEVEL = 1\nif \"QCVV_LOG_LEVEL\" in os.environ: # pragma: no cover\n QCVV_LOG_LEVEL = 10 * int(os.environ.get(\"QCVV_LOG_LEVEL\"))\n\n\ndef raise_error(exception, message=None, args=None):\n \"\"\"Raise exception with logging error.\n\n Args:\n exception (Exception): python exception.\n message (str): the error message.\n \"\"\"\n log.error(message)\n if args:\n raise exception(message, args)\n else:\n raise exception(message)\n\n\n# Configuration for logging mechanism\nclass CustomHandler(logging.StreamHandler):\n \"\"\"Custom handler for logging algorithm.\"\"\"\n\n def format(self, record):\n \"\"\"Format the record with specific format.\"\"\"\n from qcvv import __version__\n\n fmt = f\"[Qcvv {__version__}|%(levelname)s|%(asctime)s]: %(message)s\"\n return logging.Formatter(fmt, datefmt=\"%Y-%m-%d %H:%M:%S\").format(record)\n\n\n# allocate logger object\nlog = logging.getLogger(__name__)\nlog.setLevel(QCVV_LOG_LEVEL)\nlog.addHandler(CustomHandler())\n\nsrc/qcvv/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom .cli import command, live_plot, upload\n\n\"\"\"qcvv: Quantum Calibration Verification and Validation using Qibo.\"\"\"\nimport importlib.metadata as im\n\n__version__ = im.version(__package__)\n\nsrc/qcvv/calibrations/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\n\n\ndef variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n):\n \"\"\"Helper function for sweeps.\"\"\"\n return np.concatenate(\n (\n np.arange(-lowres_width, -highres_width, lowres_step),\n np.arange(-highres_width, highres_width, highres_step),\n np.arange(highres_width, lowres_width, lowres_step),\n )\n )\n\nsrc/qcvv/calibrations/t1.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import t1_fit\n\n\n@plot(\"MSR vs Time\", plots.t1_time_msr_phase)\ndef t1(\n platform: AbstractPlatform,\n qubit,\n delay_before_readout_start,\n delay_before_readout_end,\n delay_before_readout_step,\n software_averages,\n points=10,\n):\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = 
platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n ro_wait_range = np.arange(\n delay_before_readout_start, delay_before_readout_end, delay_before_readout_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n count = 0\n for _ in range(software_averages):\n for wait in ro_wait_range:\n if count % points == 0 and count > 0:\n yield data\n yield t1_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"t1\"],\n )\n ro_pulse.start = qd_pulse.duration + wait\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": wait,\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qcvv/calibrations/resonator_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.calibrations.utils import variable_resolution_scanrange\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef resonator_spectroscopy(\n platform: AbstractPlatform,\n qubit,\n lowres_width,\n lowres_step,\n highres_width,\n highres_step,\n precision_width,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n )\n + resonator_frequency\n )\n fast_sweep_data = Dataset(\n name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield fast_sweep_data\n yield lorentzian_fit(\n fast_sweep_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n fast_sweep_data.add(results)\n count += 1\n yield fast_sweep_data\n\n # FIXME: have live ploting work for multiple datasets saved\n\n if platform.resonator_type == \"3D\":\n resonator_frequency = fast_sweep_data.df.frequency[\n fast_sweep_data.df.MSR.index[fast_sweep_data.df.MSR.argmax()]\n ].magnitude\n avg_voltage = (\n np.mean(fast_sweep_data.df.MSR.values[: (lowres_width // lowres_step)])\n * 1e6\n )\n else:\n resonator_frequency = fast_sweep_data.df.frequency[\n 
fast_sweep_data.df.MSR.index[fast_sweep_data.df.MSR.argmin()]\n ].magnitude\n avg_voltage = (\n np.mean(fast_sweep_data.df.MSR.values[: (lowres_width // lowres_step)])\n * 1e6\n )\n\n precision_sweep__data = Dataset(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(-precision_width, precision_width, precision_step)\n + resonator_frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield precision_sweep__data\n yield lorentzian_fit(\n fast_sweep_data + precision_sweep__data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n precision_sweep__data.add(results)\n count += 1\n yield precision_sweep__data\n\n\n@plot(\"Frequency vs Attenuation\", plots.frequency_attenuation_msr_phase)\n@plot(\"MSR vs Frequency\", plots.frequency_attenuation_msr_phase__cut)\ndef resonator_punchout(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n min_att,\n max_att,\n step_att,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"}\n )\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence = PulseSequence()\n sequence.add(ro_pulse)\n\n # TODO: move this explicit instruction to the platform\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step)\n + resonator_frequency\n - (freq_width / 4)\n )\n attenuation_range = np.flip(np.arange(min_att, max_att, step_att))\n count = 0\n for _ in range(software_averages):\n for att in attenuation_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n # TODO: move these explicit instructions to the platform\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.ro_port[qubit].attenuation = att\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr * (np.exp(att / 10)),\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"attenuation[dB]\": att,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Flux Current\", plots.frequency_flux_msr_phase)\ndef resonator_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline=0,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + 
resonator_frequency\n )\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n # TODO: automatically extract the sweet spot current\n # TODO: add a method to generate the matrix\n\n\n@plot(\"MSR row 1 and Phase row 2\", plots.frequency_flux_msr_phase__matrix)\ndef resonator_spectroscopy_flux_matrix(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n current_min,\n current_max,\n current_step,\n fluxlines,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = np.arange(current_min, current_max, current_step)\n\n count = 0\n for fluxline in fluxlines:\n fluxline = int(fluxline)\n print(fluxline)\n data = Dataset(\n name=f\"data_q{qubit}_f{fluxline}\",\n quantities={\"frequency\": \"Hz\", \"current\": \"A\"},\n )\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qcvv/calibrations/ramsey.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import ramsey_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey_frequency_detuned(\n platform: AbstractPlatform,\n qubit,\n t_start,\n t_end,\n t_step,\n n_osc,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n runcard_qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n runcard_T2 = platform.characterization[\"single_qubit\"][qubit][\"T2\"]\n intermediate_freq = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n\n 
current_qubit_freq = runcard_qubit_freq\n current_T2 = runcard_T2\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n t_end = np.array(t_end)\n for t_max in t_end:\n count = 0\n platform.qd_port[qubit].lo_frequency = current_qubit_freq - intermediate_freq\n offset_freq = n_osc / t_max * sampling_rate # Hz\n t_range = np.arange(t_start, t_max, t_step)\n for wait in t_range:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n RX90_pulse2.relative_phase = (\n (RX90_pulse2.start / sampling_rate) * (2 * np.pi) * (-offset_freq)\n )\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": t_max,\n }\n data.add(results)\n count += 1\n\n # # Fitting\n data_fit = ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n\n new_t2 = data_fit.get_values(\"t2\")\n corrected_qubit_freq = data_fit.get_values(\"corrected_qubit_frequency\")\n\n # if ((new_t2 * 3.5) > t_max):\n if (new_t2 > current_T2).bool() and len(t_end) > 1:\n current_qubit_freq = int(corrected_qubit_freq)\n current_T2 = new_t2\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"}\n )\n else:\n corrected_qubit_freq = int(current_qubit_freq)\n new_t2 = current_T2\n break\n\n yield data\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey(\n platform: AbstractPlatform,\n qubit,\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n waits = np.arange(\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n count = 0\n for _ in range(software_averages):\n for wait in waits:\n if count % points == 0 and count > 0:\n 
yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=0,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": np.array(delay_between_pulses_end),\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qcvv/calibrations/rabi_oscillations.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n 
platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"gain[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_gain\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Amplitude\", plots.amplitude_msr_phase)\ndef rabi_pulse_amplitude(\n platform,\n qubit,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = 
(\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR vs length and amplitude\", plots.duration_amplitude_msr_phase)\ndef rabi_pulse_length_and_amplitude(\n platform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"duration\": \"ns\", \"amplitude\": \"dimensionless\"},\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qcvv/calibrations/qubit_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef qubit_spectroscopy(\n platform: AbstractPlatform,\n qubit,\n fast_start,\n fast_end,\n fast_step,\n precision_start,\n precision_end,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n freqrange = np.arange(fast_start, fast_end, fast_step) + qubit_frequency\n\n data = 
Dataset(quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"})\n\n # FIXME: Waiting for Qblox platform to take care of that\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n\n data = Dataset(name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield data\n yield lorentzian_fit(\n data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data.add(results)\n count += 1\n yield data\n\n if platform.resonator_type == \"3D\":\n qubit_frequency = data.df.frequency[\n data.df.MSR.index[data.df.MSR.argmin()]\n ].magnitude\n avg_voltage = (\n np.mean(data.df.MSR.values[: ((fast_end - fast_start) // fast_step)]) * 1e6\n )\n else:\n qubit_frequency = data.df.frequency[\n data.df.MSR.index[data.df.MSR.argmax()]\n ].magnitude\n avg_voltage = (\n np.mean(data.df.MSR.values[: ((fast_end - fast_start) // fast_step)]) * 1e6\n )\n\n prec_data = Dataset(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(precision_start, precision_end, precision_step) + qubit_frequency\n )\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield prec_data\n yield lorentzian_fit(\n data + prec_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n prec_data.add(results)\n count += 1\n yield prec_data\n # TODO: Estimate avg_voltage correctly\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_flux_msr_phase)\ndef qubit_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = np.arange(-freq_width, freq_width, freq_step) + qubit_frequency\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.qd_port[qubit].lo_frequency 
= freq - qd_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qcvv/calibrations/flipping.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import flipping_fit\n\n\n@plot(\"MSR vs Flips\", plots.flips_msr_phase)\ndef flipping(\n platform: AbstractPlatform,\n qubit,\n niter,\n step,\n points=10,\n):\n platform.reload_settings()\n pi_pulse_amplitude = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"amplitude\"\n ]\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"flips\": \"dimensionless\"})\n\n sequence = PulseSequence()\n RX90_pulse = platform.create_RX90_pulse(qubit, start=0)\n\n count = 0\n # repeat N iter times\n for n in range(0, niter, step):\n if count % points == 0 and count > 0:\n yield data\n yield flipping_fit(\n data,\n x=\"flips[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n niter=niter,\n pi_pulse_amplitude=pi_pulse_amplitude,\n labels=[\"amplitude_delta\", \"corrected_amplitude\"],\n )\n sequence.add(RX90_pulse)\n # execute sequence RX(pi/2) - [RX(pi) - RX(pi)] from 0...n times - RO\n start1 = RX90_pulse.duration\n for j in range(n):\n RX_pulse1 = platform.create_RX_pulse(qubit, start=start1)\n start2 = start1 + RX_pulse1.duration\n RX_pulse2 = platform.create_RX_pulse(qubit, start=start2)\n sequence.add(RX_pulse1)\n sequence.add(RX_pulse2)\n start1 = start2 + RX_pulse2.duration\n\n # add ro pulse at the end of the sequence\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=start1)\n sequence.add(ro_pulse)\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[ro_pulse.serial]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"flips[dimensionless]\": np.array(n),\n }\n data.add(results)\n count += 1\n sequence = PulseSequence()\n\n yield data\n\nsrc/qcvv/calibrations/calibrate_qubit_states.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\n\n\n@plot(\"exc vs gnd\", plots.exc_gnd)\ndef calibrate_qubit_states_binning(\n platform: AbstractPlatform,\n qubit,\n niter,\n points=10,\n):\n platform.reload_settings()\n platform.qrm[qubit].ports['i1'].hardware_demod_en = True # binning only works with hardware demodulation enabled\n # create exc sequence\n exc_sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.duration)\n exc_sequence.add(RX_pulse)\n exc_sequence.add(ro_pulse)\n data_exc = Dataset(name=f\"data_exc_q{qubit}\", quantities={\"iteration\": \"dimensionless\"})\n shots_results = platform.execute_pulse_sequence(exc_sequence, nshots=niter)['shots'][ro_pulse.serial]\n for n in np.arange(niter):\n msr, phase, i, q = shots_results[n]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n 
\"phase[rad]\": phase,\n \"iteration[dimensionless]\": n,\n }\n data_exc.add(results)\n yield data_exc\n\n gnd_sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n gnd_sequence.add(ro_pulse)\n\n data_gnd = Dataset(name=f\"data_gnd_q{qubit}\", quantities={\"iteration\": \"dimensionless\"})\n\n shots_results = platform.execute_pulse_sequence(gnd_sequence, nshots=niter)['shots'][ro_pulse.serial]\n for n in np.arange(niter):\n msr, phase, i, q = shots_results[n]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"iteration[dimensionless]\": n,\n }\n data_gnd.add(results)\n yield data_gnd\n\nsrc/qcvv/calibrations/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qcvv.calibrations.flipping import *\nfrom qcvv.calibrations.qubit_spectroscopy import *\nfrom qcvv.calibrations.rabi_oscillations import *\nfrom qcvv.calibrations.ramsey import *\nfrom qcvv.calibrations.resonator_spectroscopy import *\nfrom qcvv.calibrations.t1 import *\nfrom qcvv.calibrations.calibrate_qubit_states import *\n\nsrc/qcvv/calibrations/allXY.py METASEP\n"},"file_context":{"kind":"list like","value":[{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", 
\"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import 
PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n 
\"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq2.add(results)\n count += 1\n\n # save IQ_module and beta param of each iteration\n yield data_seq1\n yield data_seq2\n\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\n # print(beta_optimal)\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n 
)","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = 
(2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n 
gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n 
[\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n 
name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq2.add(results)\n count += 1\n\n # save IQ_module and beta param of each iteration\n yield data_seq1\n yield data_seq2\n\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\n # print(beta_optimal)\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import 
drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": 
np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq2.add(results)\n count += 1\n\n # save IQ_module and beta param of each iteration\n yield data_seq1\n yield data_seq2\n\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\n # print(beta_optimal)\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = 
platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX90_pulse)\n\n if gate == \"RY(pi)\":\n # print(\"Transforming to sequence RY(pi) gate\")\n if beta_param == None:\n RY_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in 
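The allXY routines above convert the averaged readout signal (MSR) into an effective value on a [-1, 1] scale using only the two calibrated readout voltages, state0_voltage and state1_voltage. A minimal, self-contained sketch of that normalization follows; the voltage values and the helper name z_expectation are invented for illustration and do not come from a real platform.

import numpy as np

# Hypothetical calibration values in uV (placeholders, not from a real device).
state0_voltage = complex(820.0)  # readout voltage with the qubit left in |0>
state1_voltage = complex(640.0)  # readout voltage with the qubit left in |1>


def z_expectation(msr_volts, resonator_type="3D"):
    """Map a measured MSR (in V) onto the same [-1, 1] scale used by allXY."""
    msr_uv = msr_volts * 1e6  # MSR is stored in V but compared against uV calibrations
    if resonator_type == "3D":
        prob = np.abs(msr_uv - state1_voltage) / (state0_voltage - state1_voltage)
    else:
        prob = np.abs(msr_uv - state1_voltage) / (state1_voltage - state0_voltage)
    return 2 * prob - 1


# The voltages are cast to complex as in the routines, so take the real part.
print(z_expectation(820.0e-6).real)  # qubit in |0>        -> +1
print(z_expectation(730.0e-6).real)  # equal superposition ->  0
print(z_expectation(640.0e-6).real)  # qubit in |1>        -> -1

With these placeholder calibrations a measurement at state0_voltage maps to +1, the midpoint to 0, and state1_voltage to -1, which is the scale stored in the probability[dimensionless] column.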
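Each routine is a generator: it yields the growing Dataset every `points` acquisitions (so a live plotter can refresh) and once more at the end. Outside the qcvv runner it can be driven by plain iteration; the sketch below assumes `platform` is an already-connected qibolab platform and that qubit 0 exists on it, neither of which is shown in this file.

# Hypothetical driver (not part of qcvv): iterate the generator and keep the
# final yielded Dataset. `platform` is assumed to be a connected qibolab
# platform and qubit index 0 is assumed to exist.
data = None
for data in allXY(platform, qubit=0, software_averages=1, points=10):
    pass  # each intermediate yield could instead be handed to a live plotter

# `data` now holds the Dataset with one row per gate pair in `gatelist`.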
AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr, phase, i, q = 
platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq2.add(results)\n count += 1\n\n # save IQ_module and beta param of each iteration\n yield data_seq1\n yield data_seq2\n\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\n # print(beta_optimal)\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX90_pulse)\n\n if gate == \"RY(pi)\":\n # print(\"Transforming to sequence RY(pi) gate\")\n if beta_param == None:\n RY_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY_pulse)\n\n if gate == \"RY(pi/2)\":\n # print(\"Transforming to sequence RY(pi/2) gate\")\n if beta_param == None:\n RY90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n 
platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n 
nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n 
platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom 
qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n 
\"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": 
\"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = 
platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n 
points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": 
msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq2.add(results)\n count += 1\n\n # save IQ_module and beta param of each iteration\n yield data_seq1\n yield data_seq2\n\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\n # print(beta_optimal)\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n 
\"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, 
relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq2.add(results)\n count += 1\n\n # save IQ_module and beta param of each iteration\n yield data_seq1\n yield data_seq2\n\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\n # print(beta_optimal)\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX90_pulse)\n\n if gate == \"RY(pi)\":\n # print(\"Transforming to sequence RY(pi) gate\")\n if beta_param == None:\n RY_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY_pulse)\n\n if gate == \"RY(pi/2)\":\n # print(\"Transforming to sequence RY(pi/2) gate\")\n if beta_param == None:\n RY90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY90_pulse)\n\n sequenceDuration = sequenceDuration + pulse_duration\n pulse_start = pulse_duration\n\n # RO pulse starting just after pair of gates","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n 
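
# The probability reported by allXY above is a rescaling of the measured MSR
# against the calibrated |0> and |1> readout voltages, mapped onto [-1, 1].
# A minimal self-contained sketch of that normalization follows; the calibration
# numbers are made-up placeholders, not values taken from any platform.
if __name__ == "__main__":
    state0_uV = 3.2  # hypothetical readout voltage (uV) associated with |0>
    state1_uV = 1.1  # hypothetical readout voltage (uV) associated with |1>

    def msr_to_probability(msr_V, resonator_type="3D"):
        # Mirrors the branch in allXY: the denominator sign flips with the
        # resonator type because the |1> signal can sit above or below |0>.
        msr_uV = msr_V * 1e6
        if resonator_type == "3D":
            prob = np.abs(msr_uV - state1_uV) / (state0_uV - state1_uV)
        else:
            prob = np.abs(msr_uV - state1_uV) / (state1_uV - state0_uV)
        return 2 * prob - 1

    # A measurement halfway between the two calibration points maps to ~0.
    print(msr_to_probability(2.15e-6))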
[\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": 
\"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n 
\"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", 
\"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": 
\"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": 
\"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n 
[\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n 
name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", 
plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n 
software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data_seq1 = Dataset(\n name=f\"data_seq1_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n data_seq2 = Dataset(\n name=f\"data_seq2_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data_seq1\n yield data_seq2\n yield drag_tunning_fit(\n data_seq1,\n data_seq2,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n \"optimal_beta_param_y\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"beta_param[dimensionless]\": beta_param,\n }\n data_seq1.add(results)\n","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n 
[\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", 
# -*- coding: utf-8 -*-
"""allXY and DRAG-pulse tuning characterization routines built on qibolab pulse sequences."""
import numpy as np
from qibolab.platforms.abstract import AbstractPlatform
from qibolab.pulses import PulseSequence

from qcvv import plots
from qcvv.data import Dataset
from qcvv.decorators import plot
from qcvv.fitting.methods import drag_tunning_fit

# allXY rotations
gatelist = [
    ["I", "I"],
    ["RX(pi)", "RX(pi)"],
    ["RY(pi)", "RY(pi)"],
    ["RX(pi)", "RY(pi)"],
    ["RY(pi)", "RX(pi)"],
    ["RX(pi/2)", "I"],
    ["RY(pi/2)", "I"],
    ["RX(pi/2)", "RY(pi/2)"],
    ["RY(pi/2)", "RX(pi/2)"],
    ["RX(pi/2)", "RY(pi)"],
    ["RY(pi/2)", "RX(pi)"],
    ["RX(pi)", "RY(pi/2)"],
    ["RY(pi)", "RX(pi/2)"],
    ["RX(pi/2)", "RX(pi)"],
    ["RX(pi)", "RX(pi/2)"],
    ["RY(pi/2)", "RY(pi)"],
    ["RY(pi)", "RY(pi/2)"],
    ["RX(pi)", "I"],
    ["RY(pi)", "I"],
    ["RX(pi/2)", "RX(pi/2)"],
    ["RY(pi/2)", "RY(pi/2)"],
]


@plot("Prob vs gate sequence", plots.prob_gate)
def allXY(
    platform: AbstractPlatform,
    qubit,
    beta_param=None,
    software_averages=1,
    points=10,
):
    platform.reload_settings()
    state0_voltage = complex(
        platform.characterization["single_qubit"][qubit]["state0_voltage"]
    )
    state1_voltage = complex(
        platform.characterization["single_qubit"][qubit]["state1_voltage"]
    )

    data = Dataset(
        name=f"data_q{qubit}",
        quantities={"probability": "dimensionless", "gateNumber": "dimensionless"},
    )

    count = 0
    for _ in range(software_averages):
        gateNumber = 1
        for gates in gatelist:
            if count % points == 0 and count > 0:
                yield data
            seq, ro_pulse = _get_sequence_from_gate_pair(
                platform, gates, qubit, beta_param
            )
            seq.add(ro_pulse)
            msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[
                ro_pulse.serial
            ]

            if platform.resonator_type == "3D":
                prob = np.abs(msr * 1e6 - state1_voltage) / (
                    state0_voltage - state1_voltage
                )
                prob = (2 * prob) - 1
            else:
                prob = np.abs(msr * 1e6 - state1_voltage) / (
                    state1_voltage - state0_voltage
                )
                prob = (2 * prob) - 1

            results = {
                "MSR[V]": msr,
                "i[V]": i,
                "q[V]": q,
                "phase[rad]": phase,
                "probability[dimensionless]": prob,
                "gateNumber[dimensionless]": np.array(gateNumber),
            }
            data.add(results)
            count += 1
            gateNumber += 1
    yield data


@plot("Prob vs gate sequence", plots.prob_gate_iteration)
def allXY_iteration(
    platform: AbstractPlatform,
    qubit,
    beta_start,
    beta_end,
    beta_step,
    software_averages=1,
    points=10,
):
    platform.reload_settings()

    state0_voltage = complex(
        platform.characterization["single_qubit"][qubit]["state0_voltage"]
    )
    state1_voltage = complex(
        platform.characterization["single_qubit"][qubit]["state1_voltage"]
    )

    data = Dataset(
        name=f"data_q{qubit}",
        quantities={
            "probability": "dimensionless",
            "gateNumber": "dimensionless",
            "beta_param": "dimensionless",
        },
    )

    count = 0
    for _ in range(software_averages):
        for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):
            gateNumber = 1
            for gates in gatelist:
                if count % points == 0 and count > 0:
                    yield data
                seq, ro_pulse = _get_sequence_from_gate_pair(
                    platform, gates, qubit, beta_param
                )
                seq.add(ro_pulse)
                msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[
                    ro_pulse.serial
                ]

                if platform.resonator_type == "3D":
                    prob = np.abs(msr * 1e6 - state1_voltage) / (
                        state0_voltage - state1_voltage
                    )
                    prob = (2 * prob) - 1
                else:
                    prob = np.abs(msr * 1e6 - state1_voltage) / (
                        state1_voltage - state0_voltage
                    )
                    prob = (2 * prob) - 1

                results = {
                    "MSR[V]": msr,
                    "i[V]": i,
                    "q[V]": q,
                    "phase[rad]": phase,
                    "probability[dimensionless]": prob,
                    "gateNumber[dimensionless]": np.array(gateNumber),
                    "beta_param[dimensionless]": np.array(beta_param),
                }
                data.add(results)
                count += 1
                gateNumber += 1
    yield data


@plot("MSR vs beta parameter", plots.msr_beta)
def drag_pulse_tunning(
    platform: AbstractPlatform,
    qubit,
    beta_start,
    beta_end,
    beta_step,
    points=10,
):
    platform.reload_settings()

    data_seq1 = Dataset(
        name=f"data_seq1_q{qubit}", quantities={"beta_param": "dimensionless"}
    )
    data_seq2 = Dataset(
        name=f"data_seq2_q{qubit}", quantities={"beta_param": "dimensionless"}
    )

    count = 0
    for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):
        if count % points == 0 and count > 0:
            yield data_seq1
            yield data_seq2
            yield drag_tunning_fit(
                data_seq1,
                data_seq2,
                x="beta_param[dimensionless]",
                y="MSR[uV]",
                qubit=qubit,
                nqubits=platform.settings["nqubits"],
                labels=[
                    "optimal_beta_param",
                    "optimal_beta_param_y",
                ],
            )
        # drag pulse RX(pi/2)
        RX90_drag_pulse = platform.create_RX90_drag_pulse(
            qubit, start=0, beta=beta_param
        )
        # drag pulse RY(pi)
        RY_drag_pulse = platform.create_RX_drag_pulse(
            qubit,
            start=RX90_drag_pulse.finish,
            relative_phase=+np.pi / 2,
            beta=beta_param,
        )
        # RO pulse
        ro_pulse = platform.create_qubit_readout_pulse(
            qubit, start=RY_drag_pulse.finish
        )

        # Rx(pi/2) - Ry(pi) - Ro
        seq1 = PulseSequence()
        seq1.add(RX90_drag_pulse)
        seq1.add(RY_drag_pulse)
        seq1.add(ro_pulse)
        # NOTE: the unpacking order below (msr, i, q, phase) differs from the
        # (msr, phase, i, q) order used for every other execute_pulse_sequence
        # call in this module; it is kept as found in the source.
        msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[
            ro_pulse.serial
        ]
        results = {
            "MSR[V]": msr,
            "i[V]": i,
            "q[V]": q,
            "phase[deg]": phase,
            "beta_param[dimensionless]": beta_param,
        }
        data_seq1.add(results)

        # drag pulse RY(pi)
        RY_drag_pulse = platform.create_RX_drag_pulse(
            qubit, start=0, relative_phase=np.pi / 2, beta=beta_param
        )
        # drag pulse RX(pi/2)
        RX90_drag_pulse = platform.create_RX90_drag_pulse(
            qubit, start=RY_drag_pulse.finish, beta=beta_param
        )

        # Ry(pi) - Rx(pi/2) - Ro
        seq2 = PulseSequence()
        seq2.add(RY_drag_pulse)
        seq2.add(RX90_drag_pulse)
        seq2.add(ro_pulse)
        msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[
            ro_pulse.serial
        ]
        results = {
            "MSR[V]": msr,
            "i[V]": i,
            "q[V]": q,
            "phase[deg]": phase,
            "beta_param[dimensionless]": beta_param,
        }
        data_seq2.add(results)
        count += 1

    # save IQ_module and beta param of each iteration
    yield data_seq1
    yield data_seq2

    # beta_optimal = fit_drag_tunning(res1, res2, beta_params)
    # print(beta_optimal)


def _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):
    sampling_rate = platform.sampling_rate
    pulse_frequency = platform.settings["native_gates"]["single_qubit"][qubit]["RX"][
        "frequency"
    ]
    pulse_duration = platform.settings["native_gates"]["single_qubit"][qubit]["RX"][
        "duration"
    ]
    # All gates have equal pulse duration

    sequence = PulseSequence()

    sequenceDuration = 0
    pulse_start = 0

    for gate in gates:
        if gate == "I":
            # print("Transforming to sequence I gate")
            pass

        if gate == "RX(pi)":
            # print("Transforming to sequence RX(pi) gate")
            if beta_param is None:
                RX_pulse = platform.create_RX_pulse(
                    qubit,
                    start=pulse_start,
                )
            else:
                RX_pulse = platform.create_RX_drag_pulse(
                    qubit,
                    start=pulse_start,
                    beta=beta_param,
                )
            sequence.add(RX_pulse)

        if gate == "RX(pi/2)":
            # print("Transforming to sequence RX(pi/2) gate")
            if beta_param is None:
                RX90_pulse = platform.create_RX90_pulse(
                    qubit,
                    start=pulse_start,
                )
            else:
                RX90_pulse = platform.create_RX90_drag_pulse(
                    qubit,
                    start=pulse_start,
                    beta=beta_param,
                )
            sequence.add(RX90_pulse)

        if gate == "RY(pi)":
            # print("Transforming to sequence RY(pi) gate")
            if beta_param is None:
                RY_pulse = platform.create_RX_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                )
            else:
                RY_pulse = platform.create_RX_drag_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                    beta=beta_param,
                )
            sequence.add(RY_pulse)

        if gate == "RY(pi/2)":
            # print("Transforming to sequence RY(pi/2) gate")
            # The source text was truncated at this point; the branch below is
            # reconstructed by symmetry with the RX(pi/2) branch (same pulse,
            # phase-shifted by pi/2) and may differ in detail from the original.
            if beta_param is None:
                RY90_pulse = platform.create_RX90_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                )
            else:
                RY90_pulse = platform.create_RX90_drag_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                    beta=beta_param,
                )
            sequence.add(RY90_pulse)

        # Reconstructed tail: advance the start time so the second gate of the
        # pair follows the first (all gates share the RX pulse duration above).
        sequenceDuration = sequenceDuration + pulse_duration
        pulse_start = pulse_duration

    # Reconstructed tail: readout pulse scheduled after the gate pair; the exact
    # start offset used in the original source is not recoverable here.
    ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration)

    return sequence, ro_pulse
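The three routines above are generators: they periodically yield the partially filled Dataset (and, in drag_pulse_tunning, intermediate fit results) so a live plotter can refresh while the scan runs. The snippet below is a minimal sketch of how such a generator might be driven outside the usual qcvv runner; run_allxy and the qubit index are hypothetical names for illustration, and the platform argument is assumed to be an already connected qibolab platform exposing the methods used above.

def run_allxy(platform, qubit=0):
    """Exhaust the allXY generator and keep only its final Dataset snapshot."""
    last = None
    for snapshot in allXY(platform, qubit, software_averages=1, points=10):
        last = snapshot  # intermediate yields exist only for live plotting
    return last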
state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = 
platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", 
\\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n 
beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\\n seq2.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq2.add(results)\\n count += 1\\n\\n # save IQ_module and beta param of each iteration\\n yield data_seq1\\n yield data_seq2\\n\\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\\n # print(beta_optimal)\\n\\n\\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\\n sampling_rate = platform.sampling_rate\\n pulse_frequency = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"frequency\\\"\\n ]\\n pulse_duration = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"duration\\\"\\n ]\\n # All gates have equal pulse duration\\n\\n sequence = PulseSequence()\\n\\n sequenceDuration = 0\\n pulse_start = 0\\n\\n for gate in gates:\\n if gate == \\\"I\\\":\\n # print(\\\"Transforming to sequence I gate\\\")\\n pass\\n\\n if gate == \\\"RX(pi)\\\":\\n # print(\\\"Transforming to sequence RX(pi) gate\\\")\\n if beta_param == None:\\n RX_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses 
import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, 
beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", 
\\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber 
+= 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n 
platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\\n seq2.add(ro_pulse)\\n msr, phase, i, q = 
platform.execute_pulse_sequence(seq2, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq2.add(results)\\n count += 1\\n\\n # save IQ_module and beta param of each iteration\\n yield data_seq1\\n yield data_seq2\\n\\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\\n # print(beta_optimal)\\n\\n\\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\\n sampling_rate = platform.sampling_rate\\n pulse_frequency = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"frequency\\\"\\n ]\\n pulse_duration = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"duration\\\"\\n ]\\n # All gates have equal pulse duration\\n\\n sequence = PulseSequence()\\n\\n sequenceDuration = 0\\n pulse_start = 0\\n\\n for gate in gates:\\n if gate == \\\"I\\\":\\n # print(\\\"Transforming to sequence I gate\\\")\\n pass\\n\\n if gate == \\\"RX(pi)\\\":\\n # print(\\\"Transforming to sequence RX(pi) gate\\\")\\n if beta_param == None:\\n RX_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\\n sequence.add(RX_pulse)\\n\\n if gate == \\\"RX(pi/2)\\\":\\n # print(\\\"Transforming to sequence RX(pi/2) gate\\\")\\n if beta_param == None:\\n RX90_pulse = platform.create_RX90_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX90_pulse = platform.create_RX90_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n 
gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n 
\\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\\n seq2.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq2.add(results)\\n count += 1\\n\\n # save IQ_module and beta param of each iteration\\n yield data_seq1\\n yield data_seq2\\n\\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\\n # print(beta_optimal)\\n\\n\\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\\n sampling_rate = platform.sampling_rate\\n pulse_frequency = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"frequency\\\"\\n ]\\n pulse_duration = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"duration\\\"\\n ]\\n # All gates have equal pulse duration\\n\\n sequence = PulseSequence()\\n\\n sequenceDuration = 0\\n pulse_start = 0\\n\\n for gate in gates:\\n if gate == \\\"I\\\":\\n # print(\\\"Transforming to sequence I gate\\\")\\n pass\\n\\n if gate == \\\"RX(pi)\\\":\\n # print(\\\"Transforming to sequence RX(pi) gate\\\")\\n if beta_param == None:\\n RX_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\\n sequence.add(RX_pulse)\\n\\n if gate == \\\"RX(pi/2)\\\":\\n # print(\\\"Transforming to sequence RX(pi/2) gate\\\")\\n if beta_param == None:\\n RX90_pulse = platform.create_RX90_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX90_pulse = platform.create_RX90_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\\n sequence.add(RX90_pulse)\\n\\n if gate == \\\"RY(pi)\\\":\\n # print(\\\"Transforming to sequence RY(pi) gate\\\")\\n if beta_param == None:\\n RY_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n relative_phase=np.pi / 2,\\n )\\n else:\\n RY_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n relative_phase=np.pi / 2,\\n beta=beta_param,\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import 
  {
    \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses
import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", 
\\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", 
plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\\n seq2.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq2.add(results)\\n count += 1\\n\\n # save IQ_module and beta param of each iteration\\n yield data_seq1\\n yield data_seq2\\n\\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\\n # print(beta_optimal)\\n\\n\\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\\n sampling_rate = platform.sampling_rate\\n pulse_frequency = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"frequency\\\"\\n ]\\n pulse_duration = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"duration\\\"\\n ]\\n # All gates have equal pulse duration\\n\\n sequence = PulseSequence()\\n\\n sequenceDuration = 0\\n pulse_start = 0\\n\\n for gate in gates:\\n if gate == \\\"I\\\":\\n # print(\\\"Transforming to sequence I gate\\\")\\n pass\\n\\n if gate == \\\"RX(pi)\\\":\\n # print(\\\"Transforming to sequence RX(pi) gate\\\")\\n if beta_param == None:\\n RX_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\\n sequence.add(RX_pulse)\\n\\n if gate == \\\"RX(pi/2)\\\":\\n # 
print(\\\"Transforming to sequence RX(pi/2) gate\\\")\\n if beta_param == None:\\n RX90_pulse = platform.create_RX90_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX90_pulse = platform.create_RX90_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\\n sequence.add(RX90_pulse)\\n\\n if gate == \\\"RY(pi)\\\":\\n # print(\\\"Transforming to sequence RY(pi) gate\\\")\\n if beta_param == None:\\n RY_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n relative_phase=np.pi / 2,\\n )\\n else:\\n RY_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n relative_phase=np.pi / 2,\\n beta=beta_param,\\n )\\n sequence.add(RY_pulse)\\n\\n if gate == \\\"RY(pi/2)\\\":\\n # print(\\\"Transforming to sequence RY(pi/2) gate\\\")\\n if beta_param == None:\\n RY90_pulse = platform.create_RX90_pulse(\\n qubit,\\n start=pulse_start,\\n relative_phase=np.pi / 2,\\n )\\n else:\\n RY90_pulse = platform.create_RX90_drag_pulse(\\n qubit,\\n start=pulse_start,\\n relative_phase=np.pi / 2,\\n beta=beta_param,\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": 
i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n 
results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": 
\\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = 
[\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - 
state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", 
\\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n 
data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n 
platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", 
quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\\n seq2.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq2.add(results)\\n count += 1\\n\\n # save IQ_module and beta param of each iteration\\n yield data_seq1\\n yield data_seq2\\n\\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\\n # print(beta_optimal)\\n\\n\\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\\n sampling_rate = platform.sampling_rate\\n pulse_frequency = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"frequency\\\"\\n ]\\n pulse_duration = platform.settings[\\\"native_gates\\\"][\\\"single_qubit\\\"][qubit][\\\"RX\\\"][\\n \\\"duration\\\"\\n ]\\n # All gates have equal pulse duration\\n\\n sequence = PulseSequence()\\n\\n sequenceDuration = 0\\n pulse_start = 0\\n\\n for gate in gates:\\n if gate == \\\"I\\\":\\n # print(\\\"Transforming to sequence I gate\\\")\\n pass\\n\\n if gate == \\\"RX(pi)\\\":\\n # print(\\\"Transforming to sequence RX(pi) gate\\\")\\n if beta_param == None:\\n RX_pulse = platform.create_RX_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=pulse_start,\\n beta=beta_param,\\n )\\n sequence.add(RX_pulse)\\n\\n if gate == \\\"RX(pi/2)\\\":\\n # print(\\\"Transforming to sequence RX(pi/2) gate\\\")\\n if beta_param == None:\\n RX90_pulse = platform.create_RX90_pulse(\\n qubit,\\n start=pulse_start,\\n )\\n else:\\n RX90_pulse = platform.create_RX90_drag_pulse(\\n qubit,\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract 
import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = 
_get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\\n )\\n\\n # Rx(pi/2) - Ry(pi) - Ro\\n seq1 = PulseSequence()\\n seq1.add(RX90_drag_pulse)\\n seq1.add(RY_drag_pulse)\\n seq1.add(ro_pulse)\\n msr, i, q, phase = platform.execute_pulse_sequence(seq1, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq1.add(results)\\n\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=RY_drag_pulse.finish, beta=beta_param\\n )\\n\\n # Ry(pi) - Rx(pi/2) - Ro\\n seq2 = PulseSequence()\\n seq2.add(RY_drag_pulse)\\n seq2.add(RX90_drag_pulse)\\n seq2.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq2, nshots=1024)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[deg]\\\": phase,\\n \\\"beta_param[dimensionless]\\\": beta_param,\\n }\\n data_seq2.add(results)\\n count += 1\\n\\n # save IQ_module and beta param of each iteration\\n yield data_seq1\\n yield data_seq2\\n\\n # beta_optimal = fit_drag_tunning(res1, res2, beta_params)\\n # print(beta_optimal)\\n\\n\\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\\n sampling_rate = 
def _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):
    sampling_rate = platform.sampling_rate
    pulse_frequency = platform.settings["native_gates"]["single_qubit"][qubit]["RX"][
        "frequency"
    ]
    pulse_duration = platform.settings["native_gates"]["single_qubit"][qubit]["RX"][
        "duration"
    ]
    # All gates have equal pulse duration

    sequence = PulseSequence()

    sequenceDuration = 0
    pulse_start = 0

    for gate in gates:
        if gate == "I":
            # print("Transforming to sequence I gate")
            pass

        if gate == "RX(pi)":
            # print("Transforming to sequence RX(pi) gate")
            if beta_param is None:
                RX_pulse = platform.create_RX_pulse(
                    qubit,
                    start=pulse_start,
                )
            else:
                RX_pulse = platform.create_RX_drag_pulse(
                    qubit,
                    start=pulse_start,
                    beta=beta_param,
                )
            sequence.add(RX_pulse)

        if gate == "RX(pi/2)":
            # print("Transforming to sequence RX(pi/2) gate")
            if beta_param is None:
                RX90_pulse = platform.create_RX90_pulse(
                    qubit,
                    start=pulse_start,
                )
            else:
                RX90_pulse = platform.create_RX90_drag_pulse(
                    qubit,
                    start=pulse_start,
                    beta=beta_param,
                )
            sequence.add(RX90_pulse)

        if gate == "RY(pi)":
            # print("Transforming to sequence RY(pi) gate")
            if beta_param is None:
                RY_pulse = platform.create_RX_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                )
            else:
                RY_pulse = platform.create_RX_drag_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                    beta=beta_param,
                )
            sequence.add(RY_pulse)

        if gate == "RY(pi/2)":
            # print("Transforming to sequence RY(pi/2) gate")
            if beta_param is None:
                RY90_pulse = platform.create_RX90_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                )
            else:
                RY90_pulse = platform.create_RX90_drag_pulse(
                    qubit,
                    start=pulse_start,
                    relative_phase=np.pi / 2,
                    beta=beta_param,
                )
            sequence.add(RY90_pulse)

        sequenceDuration = sequenceDuration + pulse_duration
        pulse_start = pulse_duration

    # RO pulse starting just after pair of gates
    ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration)

    return sequence, ro_pulse
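All three routines are generators: each time a further batch of acquisitions has been taken (controlled by the points argument) they yield their still-growing Dataset, so the live plots registered through the @plot decorator can refresh mid-scan, and the qcvv action runner is what normally drives them. A minimal sketch of driving allXY by hand, assuming an already connected and calibrated qibolab platform with a qubit 0, would look like this:

# Minimal hand-driven loop for the allXY generator (a sketch; in practice the
# qcvv action runner consumes these generators and takes care of saving).
# Assumes `platform` is an already connected, calibrated qibolab platform and
# that qubit 0 exists on it.
def run_allxy_by_hand(platform, qubit=0):
    final = None
    for data in allXY(platform, qubit, beta_param=None, software_averages=1, points=10):
        # Each yield hands back the same Dataset object with more rows added;
        # a live consumer could re-plot or checkpoint it here.
        final = data
    return final  # one row per (gate pair, software average)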
utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import drag_tunning_fit\\n\\n# allXY rotations\\ngatelist = [\\n [\\\"I\\\", \\\"I\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"I\\\"],\\n [\\\"RY(pi/2)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RY(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi)\\\"],\\n [\\\"RX(pi)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi)\\\"],\\n [\\\"RY(pi)\\\", \\\"RY(pi/2)\\\"],\\n [\\\"RX(pi)\\\", \\\"I\\\"],\\n [\\\"RY(pi)\\\", \\\"I\\\"],\\n [\\\"RX(pi/2)\\\", \\\"RX(pi/2)\\\"],\\n [\\\"RY(pi/2)\\\", \\\"RY(pi/2)\\\"],\\n]\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\\ndef allXY(\\n platform: AbstractPlatform,\\n qubit,\\n beta_param=None,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n state0_voltage = complex(\\n 
platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"probability\\\": \\\"dimensionless\\\", \\\"gateNumber\\\": \\\"dimensionless\\\"},\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\\ndef allXY_iteration(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n software_averages=1,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n state0_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state0_voltage\\\"]\\n )\\n state1_voltage = complex(\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"state1_voltage\\\"]\\n )\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\n \\\"probability\\\": \\\"dimensionless\\\",\\n \\\"gateNumber\\\": \\\"dimensionless\\\",\\n \\\"beta_param\\\": \\\"dimensionless\\\",\\n },\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(1):\\n gateNumber = 1\\n for gates in gatelist:\\n if count % points == 0 and count > 0:\\n yield data\\n seq, ro_pulse = _get_sequence_from_gate_pair(\\n platform, gates, qubit, beta_param\\n )\\n seq.add(ro_pulse)\\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\\n ro_pulse.serial\\n ]\\n\\n if platform.resonator_type == \\\"3D\\\":\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state0_voltage - state1_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n else:\\n prob = np.abs(msr * 1e6 - state1_voltage) / (\\n state1_voltage - state0_voltage\\n )\\n prob = (2 * prob) - 1\\n\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"probability[dimensionless]\\\": prob,\\n \\\"gateNumber[dimensionless]\\\": np.array(gateNumber),\\n \\\"beta_param[dimensionless]\\\": np.array(beta_param),\\n }\\n data.add(results)\\n count += 1\\n gateNumber += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\\ndef drag_pulse_tunning(\\n platform: AbstractPlatform,\\n qubit,\\n beta_start,\\n beta_end,\\n beta_step,\\n points=10,\\n):\\n\\n platform.reload_settings()\\n\\n data_seq1 = Dataset(\\n name=f\\\"data_seq1_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n )\\n data_seq2 = Dataset(\\n name=f\\\"data_seq2_q{qubit}\\\", quantities={\\\"beta_param\\\": \\\"dimensionless\\\"}\\n 
)\\n\\n count = 0\\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\\n if count % points == 0 and count > 0:\\n yield data_seq1\\n yield data_seq2\\n yield drag_tunning_fit(\\n data_seq1,\\n data_seq2,\\n x=\\\"beta_param[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"optimal_beta_param\\\",\\n \\\"optimal_beta_param_y\\\",\\n ],\\n )\\n # drag pulse RX(pi/2)\\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\\n qubit, start=0, beta=beta_param\\n )\\n # drag pulse RY(pi)\\n RY_drag_pulse = platform.create_RX_drag_pulse(\\n qubit,\\n start=RX90_drag_pulse.finish,\\n relative_phase=+np.pi / 2,\\n beta=beta_param,\\n )\\n # RO pulse\\n ro_pulse = platform.create_qubit_readout_pulse(\\n qubit, start=RY_drag_pulse.finish\",\n \"type\": \"random\"\n }\n]"},"gt":{"kind":"list like","value":[" seq, ro_pulse = _get_sequence_from_gate_pair("," data_seq2.add(results)"," seq.add(ro_pulse)"," data.add(results)","@plot(\"Prob vs gate sequence\", plots.prob_gate)","@plot(\"MSR vs beta parameter\", plots.msr_beta)"," sequence.add(RX_pulse)"," data_seq1 = Dataset("," data = Dataset("," data_seq2 = Dataset("," sequence.add(RX90_pulse)"," sequence.add(RY_pulse)"," seq1.add(RX90_drag_pulse)"," seq1.add(RY_drag_pulse)"," seq1.add(ro_pulse)","@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)"," sequence.add(RY90_pulse)"," data_seq1.add(results)"," seq2.add(RY_drag_pulse)"," seq2.add(RX90_drag_pulse)"," seq2.add(ro_pulse)"," start=pulse_start,"," ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration + 4)"," qubit, start=0, beta=beta_param"," qubit, start=0, relative_phase=np.pi / 2, beta=beta_param"," start=RX90_drag_pulse.finish,"," qubit, start=RY_drag_pulse.finish, beta=beta_param"," qubit, start=RY_drag_pulse.finish","from qcvv.data import Dataset",""," # drag pulse RY(pi)"," # drag pulse RX(pi/2)"," RY90_pulse = platform.create_RX90_pulse("," yield data_seq2"," platform: AbstractPlatform,"," prob = np.abs(msr * 1e6 - state1_voltage) / ("," )"],"string":"[\n \" seq, ro_pulse = _get_sequence_from_gate_pair(\",\n \" data_seq2.add(results)\",\n \" seq.add(ro_pulse)\",\n \" data.add(results)\",\n \"@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate)\",\n \"@plot(\\\"MSR vs beta parameter\\\", plots.msr_beta)\",\n \" sequence.add(RX_pulse)\",\n \" data_seq1 = Dataset(\",\n \" data = Dataset(\",\n \" data_seq2 = Dataset(\",\n \" sequence.add(RX90_pulse)\",\n \" sequence.add(RY_pulse)\",\n \" seq1.add(RX90_drag_pulse)\",\n \" seq1.add(RY_drag_pulse)\",\n \" seq1.add(ro_pulse)\",\n \"@plot(\\\"Prob vs gate sequence\\\", plots.prob_gate_iteration)\",\n \" sequence.add(RY90_pulse)\",\n \" data_seq1.add(results)\",\n \" seq2.add(RY_drag_pulse)\",\n \" seq2.add(RX90_drag_pulse)\",\n \" seq2.add(ro_pulse)\",\n \" start=pulse_start,\",\n \" ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration + 4)\",\n \" qubit, start=0, beta=beta_param\",\n \" qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\",\n \" start=RX90_drag_pulse.finish,\",\n \" qubit, start=RY_drag_pulse.finish, beta=beta_param\",\n \" qubit, start=RY_drag_pulse.finish\",\n \"from qcvv.data import Dataset\",\n \"\",\n \" # drag pulse RY(pi)\",\n \" # drag pulse RX(pi/2)\",\n \" RY90_pulse = platform.create_RX90_pulse(\",\n \" yield data_seq2\",\n \" platform: AbstractPlatform,\",\n \" prob = np.abs(msr * 1e6 - state1_voltage) / (\",\n \" )\"\n]"},"metainfo_separator":{"kind":"string","value":" 
METASEP\n"}}},{"rowIdx":207,"cells":{"repo_id":{"kind":"number","value":16,"string":"16"},"repo_name":{"kind":"string","value":"qiboteam__qibocal"},"project_context":{"kind":"string","value":"qiboteam__qibocal METASEP\n\ndoc/source/conf.py METASEP\n# -*- coding: utf-8 -*-\n# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\nfrom recommonmark.transform import AutoStructify\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nimport qibocal\n\n# -- Project information -----------------------------------------------------\n\nproject = \"qibocal\"\ncopyright = \"2022, The Qibo team\"\nauthor = \"The Qibo team\"\n\n# The full version, including alpha/beta/rc tags\nrelease = qibocal.__version__\n\n\n# -- General configuration ---------------------------------------------------\n\n# https://stackoverflow.com/questions/56336234/build-fail-sphinx-error-contents-rst-not-found\nmaster_doc = \"index\"\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.doctest\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"recommonmark\",\n \"sphinx_markdown_tables\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# Markdown configuration\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\nsource_suffix = {\".rst\": \"restructuredtext\", \".txt\": \"markdown\", \".md\": \"markdown\"}\n\nautosectionlabel_prefix_document = True\n# Allow to embed rst syntax in markdown files.\nenable_eval_rst = True\n\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = []\n\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx_rtd_theme\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# -- Intersphinx -------------------------------------------------------------\n\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\n\n# -- Autodoc ------------------------------------------------------------------\n#\nautodoc_member_order = \"bysource\"\n\n# Adapted this from\n# https://github.com/readthedocs/recommonmark/blob/ddd56e7717e9745f11300059e4268e204138a6b1/docs/conf.py\n# app setup hook\ndef setup(app):\n app.add_config_value(\"recommonmark_config\", {\"enable_eval_rst\": True}, True)\n app.add_transform(AutoStructify)\n app.add_css_file(\"css/style.css\")\n\nserverscripts/qibocal-update-on-change.py METASEP\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport argparse\n\nimport curio\nimport inotify.adapters\nimport inotify.constants\nfrom curio import subprocess\n\n\nasync def main(folder, exe_args):\n i = inotify.adapters.Inotify()\n i.add_watch(folder)\n\n for event in i.event_gen(yield_nones=False):\n if event is not None:\n (header, _, _, _) = event\n if (\n (header.mask & inotify.constants.IN_CREATE)\n or (header.mask & inotify.constants.IN_DELETE)\n or (header.mask & inotify.constants.IN_MODIFY)\n ):\n await subprocess.run(exe_args)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"folder\")\n parser.add_argument(\"exe_args\", nargs=\"+\")\n args = parser.parse_args()\n curio.run(main(args.folder, args.exe_args))\n\nserverscripts/qibocal-index-reports.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"qibocal-index-reports.py\nGenerates a JSON index with reports information.\n\"\"\"\nimport json\nimport pathlib\nimport sys\nfrom collections import ChainMap\n\nimport yaml\n\nROOT = \"/home/users/qibocal/qibocal-reports\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\nOUT = \"/home/users/qibocal/qibocal-reports/index.json\"\nDEFAULTS = {\n \"title\": \"-\",\n \"date\": \"-\",\n \"platform\": \"-\",\n \"start-time\": \"-\",\n \"end-time\": \"-\",\n}\nREQUIRED_FILE_METADATA = {\"title\", \"date\", \"platform\", \"start-time\" \"end-time\"}\n\n\ndef meta_from_path(p):\n meta = ChainMap(DEFAULTS)\n yaml_meta = p / \"meta.yml\"\n yaml_res = {}\n if yaml_meta.exists():\n with yaml_meta.open() as f:\n try:\n yaml_res = yaml.safe_load(f)\n except yaml.YAMLError as e:\n print(f\"Error processing {yaml_meta}: {e}\", file=sys.stderr)\n meta = meta.new_child(yaml_res)\n return meta\n\n\ndef register(p):\n path_meta = meta_from_path(p)\n title, date, platform, start_time, end_time = (\n path_meta[\"title\"],\n path_meta[\"date\"],\n path_meta[\"platform\"],\n path_meta[\"start-time\"],\n path_meta[\"end-time\"],\n )\n url = ROOT_URL + p.name\n titlelink = f'{title}'\n return (titlelink, date, platform, start_time, end_time)\n\n\ndef make_index():\n root_path = pathlib.Path(ROOT)\n data = []\n for p in root_path.iterdir():\n if p.is_dir():\n try:\n res = register(p)\n data.append(res)\n except:\n print(\"Error processing folder\", p, file=sys.stderr)\n raise\n\n with open(OUT, \"w\") as f:\n json.dump({\"data\": data}, f)\n\n\nif __name__ == \"__main__\":\n make_index()\n\nsrc/qibocal/calibrations/protocols/test.py METASEP\n# -*- coding: utf-8 -*-\nfrom qibo import gates, models\n\nfrom qibocal.data import Data\n\n\ndef test(\n platform,\n qubit: list,\n nshots,\n points=1,\n):\n data = Data(\"test\", quantities=[\"nshots\", \"probabilities\"])\n nqubits = 
len(qubit)\n circuit = models.Circuit(nqubits)\n circuit.add(gates.H(qubit[0]))\n circuit.add(gates.H(qubit[1]))\n # circuit.add(gates.H(1))\n circuit.add(gates.M(*qubit))\n execution = circuit(nshots=nshots)\n\n data.add({\"nshots\": nshots, \"probabilities\": execution.probabilities()})\n yield data\n\nsrc/qibocal/calibrations/protocols/__init__.py METASEP\n\nsrc/qibocal/calibrations/characterization/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\n\n\ndef variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n):\n \"\"\"Helper function for sweeps.\"\"\"\n return np.concatenate(\n (\n np.arange(-lowres_width, -highres_width, lowres_step),\n np.arange(-highres_width, highres_width, highres_step),\n np.arange(highres_width, lowres_width, lowres_step),\n )\n )\n\nsrc/qibocal/calibrations/characterization/t1.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import t1_fit\n\n\n@plot(\"MSR vs Time\", plots.t1_time_msr_phase)\ndef t1(\n platform: AbstractPlatform,\n qubit: int,\n delay_before_readout_start,\n delay_before_readout_end,\n delay_before_readout_step,\n software_averages,\n points=10,\n):\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n ro_wait_range = np.arange(\n delay_before_readout_start, delay_before_readout_end, delay_before_readout_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n count = 0\n for _ in range(software_averages):\n for wait in ro_wait_range:\n if count % points == 0 and count > 0:\n yield data\n yield t1_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"t1\"],\n )\n ro_pulse.start = qd_pulse.duration + wait\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": wait,\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qibocal/calibrations/characterization/resonator_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.calibrations.characterization.utils import variable_resolution_scanrange\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef resonator_spectroscopy(\n platform: AbstractPlatform,\n qubit: int,\n lowres_width,\n lowres_step,\n highres_width,\n highres_step,\n precision_width,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n sequence = PulseSequence()\n ro_pulse = 
platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n )\n + resonator_frequency\n )\n fast_sweep_data = DataUnits(\n name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield fast_sweep_data\n yield lorentzian_fit(\n fast_sweep_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n fast_sweep_data.add(results)\n count += 1\n yield fast_sweep_data\n\n if platform.resonator_type == \"3D\":\n resonator_frequency = fast_sweep_data.get_values(\"frequency\", \"Hz\")[\n np.argmax(fast_sweep_data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n fast_sweep_data.get_values(\"MSR\", \"V\")[: (lowres_width // lowres_step)]\n )\n * 1e6\n )\n else:\n resonator_frequency = fast_sweep_data.get_values(\"frequency\", \"Hz\")[\n np.argmin(fast_sweep_data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n fast_sweep_data.get_values(\"MSR\", \"V\")[: (lowres_width // lowres_step)]\n )\n * 1e6\n )\n\n precision_sweep__data = DataUnits(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(-precision_width, precision_width, precision_step)\n + resonator_frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield precision_sweep__data\n yield lorentzian_fit(\n fast_sweep_data + precision_sweep__data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n precision_sweep__data.add(results)\n count += 1\n yield precision_sweep__data\n\n\n@plot(\"Frequency vs Attenuation\", plots.frequency_attenuation_msr_phase)\n@plot(\"MSR vs Frequency\", plots.frequency_attenuation_msr_phase__cut)\ndef resonator_punchout(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n min_att,\n max_att,\n step_att,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"}\n )\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence = PulseSequence()\n sequence.add(ro_pulse)\n\n # TODO: move this explicit instruction to the platform\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step)\n + resonator_frequency\n - (freq_width / 4)\n )\n attenuation_range = np.flip(np.arange(min_att, max_att, step_att))\n count 
= 0\n for _ in range(software_averages):\n for att in attenuation_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n # TODO: move these explicit instructions to the platform\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.ro_port[qubit].attenuation = att\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr * (np.exp(att / 10)),\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"attenuation[dB]\": att,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Flux Current\", plots.frequency_flux_msr_phase)\ndef resonator_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline=0,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n # TODO: automatically extract the sweet spot current\n # TODO: add a method to generate the matrix\n\n\n@plot(\"MSR row 1 and Phase row 2\", plots.frequency_flux_msr_phase__matrix)\ndef resonator_spectroscopy_flux_matrix(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_min,\n current_max,\n current_step,\n fluxlines,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = np.arange(current_min, current_max, current_step)\n\n count = 0\n for fluxline in fluxlines:\n fluxline = int(fluxline)\n print(fluxline)\n data = DataUnits(\n name=f\"data_q{qubit}_f{fluxline}\",\n quantities={\"frequency\": \"Hz\", \"current\": \"A\"},\n )\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n 
ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.dispersive_frequency_msr_phase)\ndef dispersive_shift(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield data_spec\n yield lorentzian_fit(\n data_spec,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data_spec.add(results)\n count += 1\n yield data_spec\n\n # Shifted Spectroscopy\n sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.finish)\n sequence.add(RX_pulse)\n sequence.add(ro_pulse)\n\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield data_shifted\n yield lorentzian_fit(\n data_shifted,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n fit_file_name=\"fit_shifted\",\n )\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data_shifted.add(results)\n count += 1\n yield data_shifted\n\nsrc/qibocal/calibrations/characterization/ramsey.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import ramsey_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey_frequency_detuned(\n platform: AbstractPlatform,\n qubit: int,\n t_start,\n t_end,\n t_step,\n n_osc,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n 
sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n runcard_qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n runcard_T2 = platform.characterization[\"single_qubit\"][qubit][\"T2\"]\n intermediate_freq = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n\n current_qubit_freq = runcard_qubit_freq\n current_T2 = runcard_T2\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n t_end = np.array(t_end)\n for t_max in t_end:\n count = 0\n platform.qd_port[qubit].lo_frequency = current_qubit_freq - intermediate_freq\n offset_freq = n_osc / t_max * sampling_rate # Hz\n t_range = np.arange(t_start, t_max, t_step)\n for wait in t_range:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n RX90_pulse2.relative_phase = (\n (RX90_pulse2.start / sampling_rate) * (2 * np.pi) * (-offset_freq)\n )\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": t_max,\n }\n data.add(results)\n count += 1\n\n # # Fitting\n data_fit = ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n\n new_t2 = data_fit.get_values(\"t2\")\n corrected_qubit_freq = data_fit.get_values(\"corrected_qubit_frequency\")\n\n # if ((new_t2 * 3.5) > t_max):\n if (new_t2 > current_T2).bool() and len(t_end) > 1:\n current_qubit_freq = int(corrected_qubit_freq)\n current_T2 = new_t2\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"}\n )\n else:\n corrected_qubit_freq = int(current_qubit_freq)\n new_t2 = current_T2\n break\n\n yield data\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey(\n platform: AbstractPlatform,\n qubit: int,\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n waits = np.arange(\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - 
ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n count = 0\n for _ in range(software_averages):\n for wait in waits:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=0,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": delay_between_pulses_end,\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qibocal/calibrations/characterization/rabi_oscillations.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"pi_pulse_max_voltage\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit: int,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n 
sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"gain[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_gain\",\n \"pi_pulse_max_voltage\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Amplitude\", plots.amplitude_msr_phase)\ndef rabi_pulse_amplitude(\n platform,\n qubit: int,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"pi_pulse_max_voltage\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n 
qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR vs length and amplitude\", plots.duration_amplitude_msr_phase)\ndef rabi_pulse_length_and_amplitude(\n platform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(\n name=f\"data_q{qubit}\",\n quantities={\"duration\": \"ns\", \"amplitude\": \"dimensionless\"},\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qibocal/calibrations/characterization/qubit_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef qubit_spectroscopy(\n platform: AbstractPlatform,\n qubit: int,\n fast_start,\n fast_end,\n fast_step,\n precision_start,\n precision_end,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n qd_pulse = 
platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n freqrange = np.arange(fast_start, fast_end, fast_step) + qubit_frequency\n\n data = DataUnits(quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"})\n\n # FIXME: Waiting for Qblox platform to take care of that\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n\n data = DataUnits(name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield data\n yield lorentzian_fit(\n data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data.add(results)\n count += 1\n yield data\n\n if platform.resonator_type == \"3D\":\n qubit_frequency = data.get_values(\"frequency\", \"Hz\")[\n np.argmin(data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n data.get_values(\"MSR\", \"V\")[: ((fast_end - fast_start) // fast_step)]\n )\n * 1e6\n )\n else:\n qubit_frequency = data.get_values(\"frequency\", \"Hz\")[\n np.argmax(data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n data.get_values(\"MSR\", \"V\")[: ((fast_end - fast_start) // fast_step)]\n )\n * 1e6\n )\n\n prec_data = DataUnits(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(precision_start, precision_end, precision_step) + qubit_frequency\n )\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield prec_data\n yield lorentzian_fit(\n data + prec_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n prec_data.add(results)\n count += 1\n yield prec_data\n # TODO: Estimate avg_voltage correctly\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_flux_msr_phase)\ndef qubit_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n qubit_biasing_current = 
platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = np.arange(-freq_width, freq_width, freq_step) + qubit_frequency\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qibocal/calibrations/characterization/flipping.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import flipping_fit\n\n\n@plot(\"MSR vs Flips\", plots.flips_msr_phase)\ndef flipping(\n platform: AbstractPlatform,\n qubit: int,\n niter,\n step,\n points=10,\n):\n platform.reload_settings()\n pi_pulse_amplitude = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"amplitude\"\n ]\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"flips\": \"dimensionless\"})\n\n sequence = PulseSequence()\n RX90_pulse = platform.create_RX90_pulse(qubit, start=0)\n\n count = 0\n # repeat N iter times\n for n in range(0, niter, step):\n if count % points == 0 and count > 0:\n yield data\n yield flipping_fit(\n data,\n x=\"flips[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n niter=niter,\n pi_pulse_amplitude=pi_pulse_amplitude,\n labels=[\"amplitude_delta\", \"corrected_amplitude\"],\n )\n sequence.add(RX90_pulse)\n # execute sequence RX(pi/2) - [RX(pi) - RX(pi)] from 0...n times - RO\n start1 = RX90_pulse.duration\n for j in range(n):\n RX_pulse1 = platform.create_RX_pulse(qubit, start=start1)\n start2 = start1 + RX_pulse1.duration\n RX_pulse2 = platform.create_RX_pulse(qubit, start=start2)\n sequence.add(RX_pulse1)\n sequence.add(RX_pulse2)\n start1 = start2 + RX_pulse2.duration\n\n # add ro pulse at the end of the sequence\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=start1)\n sequence.add(ro_pulse)\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[ro_pulse.serial]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"flips[dimensionless]\": n,\n }\n data.add(results)\n count += 1\n sequence = PulseSequence()\n\n yield data\n\nsrc/qibocal/calibrations/characterization/calibrate_qubit_states.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\n\n\n@plot(\"exc vs gnd\", plots.exc_gnd)\ndef calibrate_qubit_states(\n platform: AbstractPlatform,\n qubit: int,\n niter,\n points=10,\n):\n\n # create exc sequence\n exc_sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.duration)\n exc_sequence.add(RX_pulse)\n 
exc_sequence.add(ro_pulse)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX_pulse.frequency\n )\n\n data_exc = DataUnits(name=f\"data_exc_q{qubit}\", quantities={\"iteration\": \"s\"})\n\n count = 0\n for n in np.arange(niter):\n if count % points == 0:\n yield data_exc\n msr, phase, i, q = platform.execute_pulse_sequence(exc_sequence, nshots=1)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"iteration[s]\": n,\n }\n data_exc.add(results)\n count += 1\n yield data_exc\n\n gnd_sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n gnd_sequence.add(ro_pulse)\n\n data_gnd = DataUnits(name=f\"data_gnd_q{qubit}\", quantities={\"iteration\": \"s\"})\n count = 0\n for n in np.arange(niter):\n if count % points == 0:\n yield data_gnd\n\n msr, phase, i, q = platform.execute_pulse_sequence(gnd_sequence, nshots=1)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"iteration[s]\": n,\n }\n data_gnd.add(results)\n count += 1\n yield data_gnd\n\nsrc/qibocal/calibrations/characterization/allXY.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit: int,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = DataUnits(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n ro_pulse_test = platform.create_qubit_readout_pulse(qubit, start=4)\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse_test.frequency\n )\n\n qd_pulse_test = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse_test.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n 
seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=2048)[\n ro_pulse.serial\n ]\n\n prob = np.abs(msr * 1e6 - state1_voltage) / np.abs(\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": gateNumber,\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit: int,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n ro_pulse_test = platform.create_qubit_readout_pulse(qubit, start=4)\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse_test.frequency\n )\n\n qd_pulse_test = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse_test.frequency\n )\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = DataUnits(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n prob = np.abs(msr * 1e6 - state1_voltage) / np.abs(\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": gateNumber,\n \"beta_param[dimensionless]\": beta_param,\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit: int,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n # platform.reload_settings()\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n ro_pulse_test = platform.create_qubit_readout_pulse(qubit, start=4)\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse_test.frequency\n )\n\n qd_pulse_test = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse_test.frequency\n )\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"})\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data\n yield drag_tunning_fit(\n data,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n 
nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr1, i1, q1, phase1 = platform.execute_pulse_sequence(seq1)[ro_pulse.serial]\n\n # drag pulse RY(pi/2)\n RY90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi)\n RX_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=RY90_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi/2) - Rx(pi) - Ro\n seq2 = PulseSequence()\n seq2.add(RY90_drag_pulse)\n seq2.add(RX_drag_pulse)\n seq2.add(ro_pulse)\n msr2, phase2, i2, q2 = platform.execute_pulse_sequence(seq2)[ro_pulse.serial]\n results = {\n \"MSR[V]\": msr1 - msr2,\n \"i[V]\": i1 - i2,\n \"q[V]\": q1 - q2,\n \"phase[deg]\": phase1 - phase2,\n \"beta_param[dimensionless]\": beta_param,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\ndef _get_sequence_from_gate_pair(platform: AbstractPlatform, gates, qubit, beta_param):\n\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX90_pulse)\n\n if gate == \"RY(pi)\":\n # print(\"Transforming to sequence RY(pi) gate\")\n if beta_param == None:\n RY_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY_pulse)\n\n if gate == \"RY(pi/2)\":\n # print(\"Transforming to sequence RY(pi/2) gate\")\n if beta_param == None:\n RY90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY90_pulse)\n\n sequenceDuration = sequenceDuration + pulse_duration\n pulse_start = pulse_duration\n\n # RO pulse starting just after pair of gates\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration + 4)\n\n return sequence, ro_pulse\n\nsrc/qibocal/calibrations/characterization/__init__.py METASEP\n\nsrc/qibocal/web/server.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport 
pathlib\n\nimport yaml\nfrom flask import Flask, render_template\n\nfrom qibocal import __version__\nfrom qibocal.cli.builders import ReportBuilder\n\nserver = Flask(__name__)\n\n\n@server.route(\"/\")\n@server.route(\"/data/\")\ndef page(path=None):\n folders = [\n folder\n for folder in reversed(sorted(os.listdir(os.getcwd())))\n if os.path.isdir(folder) and \"meta.yml\" in os.listdir(folder)\n ]\n\n report = None\n if path is not None:\n try:\n report = ReportBuilder(path)\n except (FileNotFoundError, TypeError):\n pass\n\n return render_template(\n \"template.html\",\n version=__version__,\n folders=folders,\n report=report,\n )\n\nsrc/qibocal/web/report.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nfrom jinja2 import Environment, FileSystemLoader\n\nfrom qibocal import __version__\nfrom qibocal.cli.builders import ReportBuilder\n\n\ndef create_report(path):\n \"\"\"Creates an HTML report for the data in the given path.\"\"\"\n filepath = pathlib.Path(__file__)\n\n with open(os.path.join(filepath.with_name(\"static\"), \"styles.css\")) as file:\n css_styles = f\"\"\n\n report = ReportBuilder(path)\n env = Environment(loader=FileSystemLoader(filepath.with_name(\"templates\")))\n template = env.get_template(\"template.html\")\n\n html = template.render(\n is_static=True,\n css_styles=css_styles,\n version=__version__,\n report=report,\n )\n\n with open(os.path.join(path, \"index.html\"), \"w\") as file:\n file.write(html)\n\nsrc/qibocal/web/app.py METASEP\n# -*- coding: utf-8 -*-\nimport os\n\nimport pandas as pd\nimport yaml\nfrom dash import Dash, Input, Output, dcc, html\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.web.server import server\n\nDataUnits() # dummy dataset call to suppress ``pint[V]`` error\n\napp = Dash(\n server=server,\n suppress_callback_exceptions=True,\n)\n\napp.layout = html.Div(\n [\n dcc.Location(id=\"url\", refresh=False),\n dcc.Graph(id=\"graph\", figure={}),\n dcc.Interval(\n id=\"interval\",\n # TODO: Perhaps the user should be allowed to change the refresh rate\n interval=1000,\n n_intervals=0,\n disabled=False,\n ),\n ]\n)\n\n\n@app.callback(\n Output(\"graph\", \"figure\"),\n Input(\"interval\", \"n_intervals\"),\n Input(\"graph\", \"figure\"),\n Input(\"url\", \"pathname\"),\n)\ndef get_graph(n, current_figure, url):\n method, folder, routine, qubit, format = url.split(os.sep)[2:]\n try:\n # data = DataUnits.load_data(folder, routine, format, \"precision_sweep\")\n # with open(f\"{folder}/platform.yml\", \"r\") as f:\n # nqubits = yaml.safe_load(f)[\"nqubits\"]\n # if len(data) > 2:\n # params, fit = resonator_spectroscopy_fit(folder, format, nqubits)\n # else:\n # params, fit = None, None\n # return getattr(plots.resonator_spectroscopy, method)(data, params, fit)\n\n # # FIXME: Temporarily hardcode the plotting method to test\n # # multiple routines with different names in one folder\n # # should be changed to:\n # # return getattr(getattr(plots, routine), method)(data)\n\n return getattr(plots, method)(folder, routine, qubit, format)\n except (FileNotFoundError, pd.errors.EmptyDataError):\n return current_figure\n\nsrc/qibocal/web/__init__.py METASEP\n\nsrc/qibocal/tests/test_data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Some tests for the Data and DataUnits class\"\"\"\nimport numpy as np\nimport pytest\nfrom pint import DimensionalityError, UndefinedUnitError\n\nfrom qibocal.data import Data, DataUnits\n\n\ndef random_data_units(length, options=None):\n data = DataUnits(options=options)\n for l 
in range(length):\n msr, i, q, phase = np.random.rand(4)\n pulse_sequence_result = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n }\n add_options = {}\n if options is not None:\n for option in options:\n add_options[option] = str(l)\n data.add({**pulse_sequence_result, **add_options})\n\n return data\n\n\ndef random_data(length):\n data = Data()\n for i in range(length):\n data.add({\"int\": int(i), \"float\": float(i), \"string\": str(i), \"bool\": bool(i)})\n return data\n\n\ndef test_data_initialization():\n \"\"\"Test DataUnits constructor\"\"\"\n data = DataUnits()\n assert len(data.df.columns) == 4\n assert list(data.df.columns) == [ # pylint: disable=E1101\n \"MSR\",\n \"i\",\n \"q\",\n \"phase\",\n ]\n\n data1 = DataUnits(quantities={\"attenuation\": \"dB\"})\n assert len(data1.df.columns) == 5\n assert list(data1.df.columns) == [ # pylint: disable=E1101\n \"attenuation\",\n \"MSR\",\n \"i\",\n \"q\",\n \"phase\",\n ]\n\n data2 = DataUnits(quantities={\"attenuation\": \"dB\"}, options=[\"option1\"])\n assert len(data2.df.columns) == 6\n assert list(data2.df.columns) == [ # pylint: disable=E1101\n \"option1\",\n \"attenuation\",\n \"MSR\",\n \"i\",\n \"q\",\n \"phase\",\n ]\n\n\ndef test_data_units_units():\n \"\"\"Test units of measure in DataUnits\"\"\"\n data_units = DataUnits()\n assert data_units.df.MSR.values.units == \"volt\"\n\n data_units1 = DataUnits(quantities={\"frequency\": \"Hz\"})\n assert data_units1.df.frequency.values.units == \"hertz\"\n\n with pytest.raises(UndefinedUnitError):\n data_units2 = DataUnits(quantities={\"fake_unit\": \"fake\"})\n\n\ndef test_data_units_add():\n \"\"\"Test add method of DataUnits\"\"\"\n data_units = random_data_units(5)\n assert len(data_units) == 5\n\n data_units1 = DataUnits(quantities={\"attenuation\": \"dB\"})\n msr, i, q, phase, att = np.random.rand(len(data_units1.df.columns))\n data_units1.add(\n {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"attenuation[dB]\": att,\n }\n )\n assert len(data_units1) == 1\n\n data_units1.add(\n {\n \"MSR[V]\": 0,\n \"i[V]\": 0.0,\n \"q[V]\": 0.0,\n \"phase[deg]\": 0,\n \"attenuation[dB]\": 1,\n }\n )\n assert len(data_units1) == 2\n\n data_units2 = DataUnits()\n msr, i, q, phase = np.random.rand(len(data_units2.df.columns))\n with pytest.raises(DimensionalityError):\n data_units2.add({\"MSR[dB]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n with pytest.raises(UndefinedUnitError):\n data_units2.add({\"MSR[test]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n data_units3 = random_data_units(10, options=[\"test\"])\n assert len(data_units3) == 10\n\n\ndef test_data_add():\n \"\"\"Test add method of Data class\"\"\"\n data = random_data(5)\n assert len(data) == 5\n data.add({\"int\": 123, \"float\": 123.456, \"string\": \"123\", \"bool\": True})\n assert len(data) == 6\n\n\ndef test_data_units_load_data_from_dict():\n \"\"\"Test set method of DataUnits class\"\"\"\n data_units = DataUnits()\n test = {\n \"MSR[V]\": [1, 2, 3],\n \"i[V]\": [3.0, 4.0, 5.0],\n \"q[V]\": np.array([3, 4, 5]),\n \"phase[deg]\": [6.0, 7.0, 8.0],\n }\n data_units.load_data_from_dict(test)\n assert len(data_units) == 3\n assert (data_units.get_values(\"MSR\", \"V\") == [1, 2, 3]).all()\n assert (data_units.get_values(\"i\", \"V\") == [3.0, 4.0, 5.0]).all()\n assert (data_units.get_values(\"q\", \"V\") == [3, 4, 5]).all()\n assert (data_units.get_values(\"phase\", \"deg\") == [6.0, 7.0, 8.0]).all()\n\n data_units1 = 
DataUnits(options=[\"option1\", \"option2\"])\n test = {\"option1\": [\"one\", \"two\", \"three\"], \"option2\": [1, 2, 3]}\n data_units1.load_data_from_dict(test)\n assert len(data_units1) == 3\n assert (data_units1.get_values(\"option1\") == [\"one\", \"two\", \"three\"]).all()\n assert (data_units1.get_values(\"option2\") == [1, 2, 3]).all()\n\n\ndef test_data_load_data_from_dict():\n \"\"\"Test set method of Data class\"\"\"\n data = random_data(5)\n test = {\n \"int\": [1, 2, 3],\n \"float\": [3.0, 4.0, 5.0],\n \"string\": [\"one\", \"two\", \"three\"],\n \"bool\": [True, False, True],\n }\n data.load_data_from_dict(test)\n assert len(data) == 3\n assert (data.get_values(\"int\") == [1, 2, 3]).all()\n assert (data.get_values(\"float\") == [3.0, 4.0, 5.0]).all()\n assert (data.get_values(\"string\") == [\"one\", \"two\", \"three\"]).all()\n assert (data.get_values(\"bool\") == [True, False, True]).all()\n\n\ndef test_get_values_data_units():\n \"\"\"Test get_values method of DataUnits class\"\"\"\n data_units = random_data_units(5, options=[\"option\"])\n\n assert (data_units.get_values(\"option\") == data_units.df[\"option\"]).all()\n assert (\n data_units.get_values(\"MSR\", \"uV\")\n == data_units.df[\"MSR\"].pint.to(\"uV\").pint.magnitude\n ).all()\n\n\ndef test_get_values_data():\n \"\"\"Test get_values method of Data class\"\"\"\n data = random_data(5)\n assert (data.get_values(\"int\") == data.df[\"int\"]).all()\n\nsrc/qibocal/fitting/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport re\n\nimport numpy as np\n\n\ndef lorenzian(frequency, amplitude, center, sigma, offset):\n # http://openafox.com/science/peak-function-derivations.html\n return (amplitude / np.pi) * (\n sigma / ((frequency - center) ** 2 + sigma**2)\n ) + offset\n\n\ndef rabi(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # Period T : 1/p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef ramsey(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # DeltaFreq : p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef exp(x, *p):\n return p[0] - p[1] * np.exp(-1 * x * p[2])\n\n\ndef flipping(x, p0, p1, p2, p3):\n # A fit to Flipping Qubit oscillation\n # Epsilon?? 
shoule be Amplitude : p[0]\n # Offset : p[1]\n # Period of oscillation : p[2]\n # phase for the first point corresponding to pi/2 rotation : p[3]\n return np.sin(x * 2 * np.pi / p2 + p3) * p0 + p1\n\n\ndef cos(x, p0, p1, p2, p3):\n # Offset : p[0]\n # Amplitude : p[1]\n # Period : p[2]\n # Phase : p[3]\n return p0 + p1 * np.cos(2 * np.pi * x / p2 + p3)\n\n\ndef parse(key):\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n return name, unit\n\nsrc/qibocal/fitting/methods.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e9\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"popt0\": fit_res.best_values[\"amplitude\"],\n \"popt1\": fit_res.best_values[\"center\"],\n \"popt2\": fit_res.best_values[\"sigma\"],\n \"popt3\": 
fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n pi_pulse_max_voltage = smooth_dataset.max()\n t2 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: pi_pulse_max_voltage,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq + delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. 
N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\n\ndef drag_tunning_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n ],\n )\n\n beta_params = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n 0, # Offset: p[0]\n beta_params.values[np.argmax(voltages)]\n - beta_params.values[np.argmin(voltages)], # Amplitude: p[1]\n 4, # Period: p[2]\n 0.3, # Phase: p[3]\n ]\n\n try:\n popt, pcov = curve_fit(cos, beta_params.values, voltages.values)\n smooth_dataset = cos(beta_params.values, popt[0], popt[1], popt[2], popt[3])\n beta_optimal = beta_params.values[np.argmin(smooth_dataset)]\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: beta_optimal,\n }\n )\n return data_fit\n\nsrc/qibocal/fitting/__init__.py METASEP\n\nsrc/qibocal/cli/builders.py METASEP\n# -*- coding: utf-8 -*-\nimport datetime\nimport inspect\nimport os\nimport shutil\n\nimport yaml\n\nfrom qibocal import calibrations\nfrom qibocal.config import log, raise_error\nfrom qibocal.data import Data\n\n\ndef load_yaml(path):\n \"\"\"Load yaml file from disk.\"\"\"\n with open(path) as file:\n data = yaml.safe_load(file)\n return data\n\n\nclass ActionBuilder:\n \"\"\"Class for parsing and executing runcards.\n Args:\n runcard (path): path containing the runcard.\n folder (path): path for the output folder.\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n\n def __init__(self, runcard, folder=None, force=False):\n path, self.folder = self._generate_output_folder(folder, force)\n self.runcard = load_yaml(runcard)\n # Qibolab default backend if not provided in runcard.\n backend_name = self.runcard.get(\"backend\", \"qibolab\")\n platform_name = self.runcard.get(\"platform\", \"dummy\")\n self.backend, self.platform = self._allocate_backend(\n backend_name, platform_name, path\n )\n self.qubits = self.runcard[\"qubits\"]\n self.format = self.runcard[\"format\"]\n\n # Saving runcard\n shutil.copy(runcard, f\"{path}/runcard.yml\")\n self.save_meta(path, self.folder)\n\n @staticmethod\n def _generate_output_folder(folder, force):\n \"\"\"Static method for generating the output folder.\n Args:\n folder (path): path for the output folder. 
If None it will be created a folder automatically\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n if folder is None:\n import getpass\n\n e = datetime.datetime.now()\n user = getpass.getuser().replace(\".\", \"-\")\n date = e.strftime(\"%Y-%m-%d\")\n folder = f\"{date}-{'000'}-{user}\"\n num = 0\n while os.path.exists(folder):\n log.info(f\"Directory {folder} already exists.\")\n num += 1\n folder = f\"{date}-{str(num).rjust(3, '0')}-{user}\"\n log.info(f\"Trying to create directory {folder}\")\n elif os.path.exists(folder) and not force:\n raise_error(RuntimeError, f\"Directory {folder} already exists.\")\n elif os.path.exists(folder) and force:\n log.warning(f\"Deleting previous directory {folder}.\")\n shutil.rmtree(os.path.join(os.getcwd(), folder))\n\n path = os.path.join(os.getcwd(), folder)\n log.info(f\"Creating directory {folder}.\")\n os.makedirs(path)\n return path, folder\n\n def _allocate_backend(self, backend_name, platform_name, path):\n \"\"\"Allocate the platform using Qibolab.\"\"\"\n from qibo.backends import GlobalBackend, set_backend\n\n if backend_name == \"qibolab\":\n from qibolab.paths import qibolab_folder\n\n original_runcard = qibolab_folder / \"runcards\" / f\"{platform_name}.yml\"\n # copy of the original runcard that will stay unmodified\n shutil.copy(original_runcard, f\"{path}/platform.yml\")\n # copy of the original runcard that will be modified during calibration\n updated_runcard = f\"{self.folder}/new_platform.yml\"\n shutil.copy(original_runcard, updated_runcard)\n # allocate backend with updated_runcard\n set_backend(\n backend=backend_name, platform=platform_name, runcard=updated_runcard\n )\n backend = GlobalBackend()\n return backend, backend.platform\n else:\n set_backend(backend=backend_name, platform=platform_name)\n backend = GlobalBackend()\n return backend, None\n\n def save_meta(self, path, folder):\n import qibocal\n\n e = datetime.datetime.now(datetime.timezone.utc)\n meta = {}\n meta[\"title\"] = folder\n meta[\"backend\"] = str(self.backend)\n meta[\"platform\"] = str(self.backend.platform)\n meta[\"date\"] = e.strftime(\"%Y-%m-%d\")\n meta[\"start-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"versions\"] = self.backend.versions # pylint: disable=E1101\n meta[\"versions\"][\"qibocal\"] = qibocal.__version__\n\n with open(f\"{path}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n def _build_single_action(self, name):\n \"\"\"Helper method to parse the actions in the runcard.\"\"\"\n f = getattr(calibrations, name)\n path = os.path.join(self.folder, f\"data/{name}/\")\n os.makedirs(path)\n sig = inspect.signature(f)\n params = self.runcard[\"actions\"][name]\n for param in list(sig.parameters)[2:-1]:\n if param not in params:\n raise_error(AttributeError, f\"Missing parameter {param} in runcard.\")\n if f.__annotations__[\"qubit\"] == int:\n single_qubit_action = True\n else:\n single_qubit_action = False\n\n return f, params, path, single_qubit_action\n\n def execute(self):\n \"\"\"Method to execute sequentially all the actions in the runcard.\"\"\"\n if self.platform is not None:\n self.platform.connect()\n self.platform.setup()\n self.platform.start()\n\n for action in self.runcard[\"actions\"]:\n routine, args, path, single_qubit_action = self._build_single_action(action)\n self._execute_single_action(routine, args, path, single_qubit_action)\n\n if self.platform is not None:\n self.platform.stop()\n self.platform.disconnect()\n\n def 
_execute_single_action(self, routine, arguments, path, single_qubit):\n \"\"\"Method to execute a single action and retrieving the results.\"\"\"\n if self.format is None:\n raise_error(ValueError, f\"Cannot store data using {self.format} format.\")\n if single_qubit:\n for qubit in self.qubits:\n results = routine(self.platform, qubit, **arguments)\n\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n if self.platform is not None:\n self.update_platform_runcard(qubit, routine.__name__)\n else:\n results = routine(self.platform, self.qubits, **arguments)\n\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n if self.platform is not None:\n self.update_platform_runcard(qubit, routine.__name__)\n\n def update_platform_runcard(self, qubit, routine):\n\n try:\n data_fit = Data.load_data(\n self.folder, routine, self.format, f\"fit_q{qubit}\"\n )\n except:\n data_fit = Data()\n\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n settings = load_yaml(f\"{self.folder}/new_platform.yml\")\n\n for param in params:\n settings[\"characterization\"][\"single_qubit\"][qubit][param] = int(\n data_fit.get_values(param)\n )\n\n with open(f\"{self.folder}/new_platform.yml\", \"w\") as file:\n yaml.dump(\n settings, file, sort_keys=False, indent=4, default_flow_style=None\n )\n\n def dump_report(self):\n from qibocal.web.report import create_report\n\n # update end time\n meta = load_yaml(f\"{self.folder}/meta.yml\")\n e = datetime.datetime.now(datetime.timezone.utc)\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n with open(f\"{self.folder}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n create_report(self.folder)\n\n\nclass ReportBuilder:\n \"\"\"Parses routines and plots to report and live plotting page.\n\n Args:\n path (str): Path to the data folder to generate report for.\n \"\"\"\n\n def __init__(self, path):\n self.path = path\n self.metadata = load_yaml(os.path.join(path, \"meta.yml\"))\n\n # find proper path title\n base, self.title = os.path.join(os.getcwd(), path), \"\"\n while self.title in (\"\", \".\"):\n base, self.title = os.path.split(base)\n\n self.runcard = load_yaml(os.path.join(path, \"runcard.yml\"))\n self.format = self.runcard.get(\"format\")\n self.qubits = self.runcard.get(\"qubits\")\n\n # create calibration routine objects\n # (could be incorporated to :meth:`qibocal.cli.builders.ActionBuilder._build_single_action`)\n self.routines = []\n for action in self.runcard.get(\"actions\"):\n if hasattr(calibrations, action):\n routine = getattr(calibrations, action)\n else:\n raise_error(ValueError, f\"Undefined action {action} in report.\")\n\n if not hasattr(routine, \"plots\"):\n routine.plots = []\n self.routines.append(routine)\n\n def get_routine_name(self, routine):\n \"\"\"Prettify routine's name for report headers.\"\"\"\n return routine.__name__.replace(\"_\", \" \").title()\n\n def get_figure(self, routine, method, qubit):\n \"\"\"Get html figure for report.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n import tempfile\n\n figure = method(self.path, routine.__name__, qubit, self.format)\n with tempfile.NamedTemporaryFile() as temp:\n figure.write_html(temp.name, include_plotlyjs=False, full_html=False)\n fightml = temp.read().decode(\"utf-8\")\n return fightml\n\n def get_live_figure(self, routine, method, qubit):\n \"\"\"Get url to dash page for live plotting.\n\n This url is used by :meth:`qibocal.web.app.get_graph`.\n\n Args:\n 
routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n return os.path.join(\n method.__name__,\n self.path,\n routine.__name__,\n str(qubit),\n self.format,\n )\n\nsrc/qibocal/cli/_base.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Adds global CLI options.\"\"\"\nimport base64\nimport pathlib\nimport shutil\nimport socket\nimport subprocess\nimport uuid\nfrom urllib.parse import urljoin\n\nimport click\nfrom qibo.config import log, raise_error\n\nfrom qibocal.cli.builders import ActionBuilder\n\nCONTEXT_SETTINGS = dict(help_option_names=[\"-h\", \"--help\"])\n\n# options for report upload\nUPLOAD_HOST = (\n \"qibocal@localhost\"\n if socket.gethostname() == \"saadiyat\"\n else \"qibocal@login.qrccluster.com\"\n)\nTARGET_DIR = \"qibocal-reports/\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"runcard\", metavar=\"RUNCARD\", type=click.Path(exists=True))\n@click.option(\n \"folder\",\n \"-o\",\n type=click.Path(),\n help=\"Output folder. If not provided a standard name will generated.\",\n)\n@click.option(\n \"force\",\n \"-f\",\n is_flag=True,\n help=\"Use --force option to overwrite the output folder.\",\n)\ndef command(runcard, folder, force=None):\n\n \"\"\"qibocal: Quantum Calibration Verification and Validation using Qibo.\n\n Arguments:\n\n - RUNCARD: runcard with declarative inputs.\n \"\"\"\n\n builder = ActionBuilder(runcard, folder, force)\n builder.execute()\n builder.dump_report()\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.option(\n \"port\",\n \"-p\",\n \"--port\",\n default=8050,\n type=int,\n help=\"Localhost port to launch dash server.\",\n)\n@click.option(\n \"debug\",\n \"-d\",\n \"--debug\",\n is_flag=True,\n help=\"Launch server in debugging mode.\",\n)\ndef live_plot(port, debug):\n \"\"\"Real time plotting of calibration data on a dash server.\"\"\"\n import socket\n\n from qibocal.web.app import app\n\n # change port if it is already used\n while True:\n with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n if s.connect_ex((\"localhost\", port)) != 0:\n break\n port += 1\n\n app.run_server(debug=debug, port=port)\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"output_folder\", metavar=\"FOLDER\", type=click.Path(exists=True))\ndef upload(output_folder):\n \"\"\"Uploads output folder to server\"\"\"\n\n output_path = pathlib.Path(output_folder)\n\n # check the rsync command exists.\n if not shutil.which(\"rsync\"):\n raise_error(\n RuntimeError,\n \"Could not find the rsync command. Please make sure it is installed.\",\n )\n\n # check that we can authentica with a certificate\n ssh_command_line = (\n \"ssh\",\n \"-o\",\n \"PreferredAuthentications=publickey\",\n \"-q\",\n UPLOAD_HOST,\n \"exit\",\n )\n\n str_line = \" \".join(repr(ele) for ele in ssh_command_line)\n\n log.info(f\"Checking SSH connection to {UPLOAD_HOST}.\")\n\n try:\n subprocess.run(ssh_command_line, check=True)\n except subprocess.CalledProcessError as e:\n raise RuntimeError(\n (\n \"Could not validate the SSH key. \"\n \"The command\\n%s\\nreturned a non zero exit status. 
\"\n \"Please make sure that your public SSH key is on the server.\"\n )\n % str_line\n ) from e\n except OSError as e:\n raise RuntimeError(\n \"Could not run the command\\n{}\\n: {}\".format(str_line, e)\n ) from e\n\n log.info(\"Connection seems OK.\")\n\n # upload output\n randname = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode()\n newdir = TARGET_DIR + randname\n\n rsync_command = (\n \"rsync\",\n \"-aLz\",\n \"--chmod=ug=rwx,o=rx\",\n f\"{output_path}/\",\n f\"{UPLOAD_HOST}:{newdir}\",\n )\n\n log.info(f\"Uploading output ({output_path}) to {UPLOAD_HOST}\")\n try:\n subprocess.run(rsync_command, check=True)\n except subprocess.CalledProcessError as e:\n msg = f\"Failed to upload output: {e}\"\n raise RuntimeError(msg) from e\n\n url = urljoin(ROOT_URL, randname)\n log.info(f\"Upload completed. The result is available at:\\n{url}\")\n\nsrc/qibocal/cli/__init__.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"CLI entry point.\"\"\"\nfrom ._base import command, live_plot, upload\n\nsrc/qibocal/calibrations/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qibocal.calibrations.characterization.allXY import *\nfrom qibocal.calibrations.characterization.calibrate_qubit_states import *\nfrom qibocal.calibrations.characterization.flipping import *\nfrom qibocal.calibrations.characterization.qubit_spectroscopy import *\nfrom qibocal.calibrations.characterization.rabi_oscillations import *\nfrom qibocal.calibrations.characterization.ramsey import *\nfrom qibocal.calibrations.characterization.resonator_spectroscopy import *\nfrom qibocal.calibrations.characterization.t1 import *\nfrom qibocal.calibrations.protocols.test import *\n\nsrc/qibocal/decorators.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Decorators implementation.\"\"\"\nimport os\n\nfrom qibocal.config import raise_error\n\n\ndef plot(header, method):\n \"\"\"Decorator for adding plots in the report and live plotting page.\n\n Args:\n header (str): Header of the plot to use in the report.\n method (Callable): Plotting method defined under ``qibocal.plots``.\n \"\"\"\n\n def wrapped(f):\n if hasattr(f, \"plots\"):\n # insert in the beginning of the list to have\n # proper plot ordering in the report\n f.plots.insert(0, (header, method))\n else:\n f.plots = [(header, method)]\n return f\n\n return wrapped\n\nsrc/qibocal/data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Implementation of DataUnits and Data class to store calibration routines outputs.\"\"\"\n\nimport re\nfrom abc import abstractmethod\n\nimport numpy as np\nimport pandas as pd\nimport pint_pandas\n\nfrom qibocal.config import raise_error\n\n\nclass AbstractData:\n def __init__(self, name=None):\n\n if name is None:\n self.name = \"data\"\n else:\n self.name = name\n\n self.df = pd.DataFrame()\n self.quantities = None\n\n def __add__(self, data):\n self.df = pd.concat([self.df, data.df], ignore_index=True)\n return self\n\n @abstractmethod\n def add(self, data):\n raise_error(NotImplementedError)\n\n def __len__(self):\n \"\"\"Computes the length of the data.\"\"\"\n return len(self.df)\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n raise_error(NotImplementedError)\n\n @abstractmethod\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n if self.quantities == None:\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n else:\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing 
output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\n\nclass DataUnits(AbstractData):\n \"\"\"Class to store the data measured during the calibration routines.\n It is a wrapper to a pandas DataFrame with units of measure from the Pint\n library.\n\n Args:\n quantities (dict): dictionary containing additional quantities that the user\n may save other than the pulse sequence output. The keys are the name of the\n quantities and the corresponding values are the units of measure.\n options (list): list containing additional values to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None, options=None):\n\n super().__init__(name=name)\n\n self._df = pd.DataFrame(\n {\n \"MSR\": pd.Series(dtype=\"pint[V]\"),\n \"i\": pd.Series(dtype=\"pint[V]\"),\n \"q\": pd.Series(dtype=\"pint[V]\"),\n \"phase\": pd.Series(dtype=\"pint[deg]\"),\n }\n )\n self.quantities = {\"MSR\": \"V\", \"i\": \"V\", \"q\": \"V\", \"phase\": \"rad\"}\n self.options = []\n\n if quantities is not None:\n self.quantities.update(quantities)\n for name, unit in quantities.items():\n self.df.insert(0, name, pd.Series(dtype=f\"pint[{unit}]\"))\n\n if options is not None:\n self.options = options\n for option in options:\n self.df.insert( # pylint: disable=E1101\n 0, option, pd.Series(dtype=object)\n )\n\n from pint import UnitRegistry\n\n self.ureg = UnitRegistry()\n\n @property\n def df(self):\n return self._df\n\n @df.setter\n def df(self, df):\n \"\"\"Set df attribute.\n\n Args:\n df (pd.DataFrame): pandas DataFrame. Every key should have the following form:\n ``[]``.\n \"\"\"\n if isinstance(df, pd.DataFrame):\n self._df = df\n else:\n raise_error(TypeError, f\"{df.type} is not a pd.DataFrame.\")\n\n def load_data_from_dict(self, data: dict):\n \"\"\"Set df attribute.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n processed_data = {}\n for key, values in data.items():\n if \"[\" in key:\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n processed_data[name] = pd.Series(\n data=(np.array(values) * self.ureg(unit)), dtype=f\"pint[{unit}]\"\n )\n else:\n processed_data[key] = pd.Series(data=(values), dtype=object)\n self._df = pd.DataFrame(processed_data)\n\n def add(self, data):\n \"\"\"Add a row to `DataUnits`.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n\n for key, value in data.items():\n if \"[\" in key:\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n # TODO: find a better way to do this\n self.df.loc[l, name] = np.array(value) * self.ureg(unit)\n else:\n self.df.loc[l, key] = value\n\n def get_values(self, key, unit=None):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n unit (str): Unit of the returned values.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n if unit is None:\n return self.df[key]\n else:\n return self.df[key].pint.to(unit).pint.magnitude\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. 
Possible choices are 'csv' and 'pickle'.\n\n Returns:\n data (``DataUnits``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file, header=[0, 1])\n obj.df.pop(\"Unnamed: 0_level_0\")\n quantities_label = []\n obj.options = []\n for column in obj.df.columns: # pylint: disable=E1101\n if \"Unnamed\" not in column[1]:\n quantities_label.append(column[0])\n else:\n obj.options.append(column[0])\n quantities_df = obj.df[quantities_label].pint.quantify()\n options_df = obj.df[obj.options]\n options_df.columns = options_df.columns.droplevel(1)\n obj.df = pd.concat([quantities_df, options_df], axis=1)\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n data = self.df[list(self.quantities)].pint.dequantify()\n firsts = data.index.get_level_values(None)\n data[self.options] = self.df[self.options].loc[firsts].values\n data.to_csv(f\"{path}/{self.name}.csv\")\n\n\nclass Data(AbstractData):\n \"\"\"Class to store the data obtained from calibration routines.\n It is a wrapper to a pandas DataFrame.\n\n Args:\n quantities (dict): dictionary quantities to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n if quantities is not None:\n self.quantities = quantities\n for name in quantities:\n self.df.insert(0, name, pd.Series(dtype=object))\n\n @property\n def df(self):\n return self._df\n\n @df.setter\n def df(self, data):\n \"\"\"Set df attribute.\n\n Args:\n df (pd.DataFrame):\n \"\"\"\n if isinstance(data, pd.DataFrame):\n self._df = data\n\n def load_data_from_dict(self, data: dict):\n \"\"\"Set df attribute.\n\n Args:\n df (dict): dictionary containing the data to be added.\n \"\"\"\n processed_data = {}\n for key, values in data.items():\n processed_data[key] = pd.Series(data=(values), dtype=object)\n self._df = pd.DataFrame(processed_data)\n\n def add(self, data):\n \"\"\"Add a row to data.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n self.df.loc[l, key] = value\n\n def get_values(self, quantity):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity].values\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. 
Possible choices are 'csv' and 'pickle'.\n\n Returns:\n data (``Data``): data object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file)\n obj.df.pop(\"Unnamed: 0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\nsrc/qibocal/config.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Custom logger implemenation.\"\"\"\nimport logging\nimport os\n\n# Logging levels available here https://docs.python.org/3/library/logging.html#logging-levels\nQIBOCAL_LOG_LEVEL = 30\nif \"QIBOCAL_LOG_LEVEL\" in os.environ: # pragma: no cover\n QIBOCAL_LOG_LEVEL = 10 * int(os.environ.get(\"QIBOCAL_LOG_LEVEL\"))\n\n\ndef raise_error(exception, message=None, args=None):\n \"\"\"Raise exception with logging error.\n\n Args:\n exception (Exception): python exception.\n message (str): the error message.\n \"\"\"\n log.error(message)\n if args:\n raise exception(message, args)\n else:\n raise exception(message)\n\n\n# Configuration for logging mechanism\nclass CustomHandler(logging.StreamHandler):\n \"\"\"Custom handler for logging algorithm.\"\"\"\n\n def format(self, record):\n \"\"\"Format the record with specific format.\"\"\"\n from qibocal import __version__\n\n fmt = f\"[Qibocal {__version__}|%(levelname)s|%(asctime)s]: %(message)s\"\n\n grey = \"\\x1b[38;20m\"\n green = \"\\x1b[92m\"\n yellow = \"\\x1b[33;20m\"\n red = \"\\x1b[31;20m\"\n bold_red = \"\\x1b[31;1m\"\n reset = \"\\x1b[0m\"\n\n self.FORMATS = {\n logging.DEBUG: green + fmt + reset,\n logging.INFO: grey + fmt + reset,\n logging.WARNING: yellow + fmt + reset,\n logging.ERROR: red + fmt + reset,\n logging.CRITICAL: bold_red + fmt + reset,\n }\n log_fmt = self.FORMATS.get(record.levelno)\n return logging.Formatter(log_fmt, datefmt=\"%Y-%m-%d %H:%M:%S\").format(record)\n\n\n# allocate logger object\nlog = logging.getLogger(__name__)\nlog.setLevel(QIBOCAL_LOG_LEVEL)\nlog.addHandler(CustomHandler())\n\nsrc/qibocal/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom .cli import command, live_plot, upload\n\n\"\"\"qibocal: Quantum Calibration Verification and Validation using Qibo.\"\"\"\nimport importlib.metadata as im\n\n__version__ = im.version(__package__)\n\nsrc/qibocal/plots/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qibocal.plots.heatmaps import *\nfrom qibocal.plots.scatters import *\n\nsrc/qibocal/plots/spectroscopies.py METASEP\n"},"file_context":{"kind":"list like","value":[{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n 
except:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to 
different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency 
(GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", 
\"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = 
f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = 
DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n 
data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n 
go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = 
Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_spec),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # fitting shifted traces\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\n freqrange = np.linspace(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n 
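

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only): a minimal example of how one of the
# plotting routines above could be driven from a report script.  The folder,
# routine name, and format values below are hypothetical placeholders, not
# values prescribed by qibocal.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    fig = frequency_msr_phase__fast_precision(
        folder="runs/2023-01-01",           # hypothetical output folder
        routine="resonator_spectroscopy",   # hypothetical routine name
        qubit=0,
        format="csv",
    )
    fig.write_html("resonator_spectroscopy_q0.html")  # or fig.show()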
fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n 
for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n 
font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n 
fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = 
DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n 
y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = 
f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = 
DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n 
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.30,
                showarrow=False,
                text=f"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig


def frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):
    # 1D cut: MSR vs frequency at a fixed attenuation value
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    plot1d_attenuation = 30  # attenuation value to use for 1D frequency vs MSR plot

    fig = go.Figure()
    # index data on a specific attenuation value
    smalldf = data.df[data.get_values("attenuation", "dB") == plot1d_attenuation].copy()
    # split multiple software averages to different datasets
    datasets = []
    while len(smalldf):
        datasets.append(smalldf.drop_duplicates("frequency"))
        smalldf.drop(datasets[-1].index, inplace=True)
        fig.add_trace(
            go.Scatter(
                x=datasets[-1]["frequency"].pint.to("GHz").pint.magnitude,
                y=datasets[-1]["MSR"].pint.to("V").pint.magnitude,
            ),
        )

    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (V)",
    )
    return fig


def frequency_flux_msr_phase(folder, routine, qubit, format):
    # Heatmaps of MSR and phase vs frequency and bias current
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Current (A)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Current (A)",
    )
    return fig


def frequency_flux_msr_phase__matrix(folder, routine, qubit, format):
    # One column of heatmaps per flux line found on disk
    import os  # ``os.path.exists`` is used below but ``os`` is not imported at module level

    fluxes = []
    for i in range(25):  # FIXME: 25 is hardcoded
        file = f"{folder}/data/{routine}/data_q{qubit}_f{i}.csv"
        if os.path.exists(file):
            fluxes += [i]

    if len(fluxes) < 1:
        nb = 1
    else:
        nb = len(fluxes)
    fig = make_subplots(
        rows=2,
        cols=nb,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        x_title="Frequency (Hz)",
        y_title="Current (A)",
        shared_xaxes=True,
        shared_yaxes=True,
    )

    for j in fluxes:
        if j == fluxes[-1]:
            showscale = True
        else:
            showscale = False
        data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}_f{j}")
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("MSR", "V"),
                showscale=showscale,
            ),
            row=1,
            col=j,
        )
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("phase", "rad"),
                showscale=showscale,
            ),
            row=2,
            col=j,
        )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
    )
    return fig


def frequency_attenuation_msr_phase(folder, routine, qubit, format):
    # Heatmaps of MSR and phase vs frequency and readout attenuation
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Attenuation (dB)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Attenuation (dB)",
    )
    return fig


def dispersive_frequency_msr_phase(folder, routine, qubit, formato):
    # Spectroscopy and qubit-state-shifted spectroscopy, with Lorentzian fits
    try:
        data_spec = DataUnits.load_data(folder, routine, formato, f"data_q{qubit}")
    except:
        data_spec = DataUnits(name=f"data_q{qubit}", quantities={"frequency": "Hz"})

    try:
        data_shifted = DataUnits.load_data(
            folder, routine, formato, f"data_shifted_q{qubit}"
        )
    except:
        data_shifted = DataUnits(
            name=f"data_shifted_q{qubit}", quantities={"frequency": "Hz"}
        )

    try:
        data_fit = Data.load_data(folder, routine, formato, f"fit_q{qubit}")
    except:
        data_fit = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    try:
        data_fit_shifted = Data.load_data(
            folder, routine, formato, f"fit_shifted_q{qubit}"
        )
    except:
        data_fit_shifted = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("MSR", "uV"),
            name="Spectroscopy",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("phase", "rad"),
            name="Spectroscopy",
        ),
        row=1,
        col=2,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("MSR", "uV"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=1,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("phase", "rad"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=2,
    )

    # fitting traces
    if len(data_spec) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_spec.get_values("frequency", "GHz")),
            max(data_spec.get_values("frequency", "GHz")),
            2 * len(data_spec),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )

    # fitting shifted traces
    if len(data_shifted) > 0 and len(data_fit_shifted) > 0:
        freqrange = np.linspace(
            min(data_shifted.get_values("frequency", "GHz")),
            max(data_shifted.get_values("frequency", "GHz")),
            2 * len(data_shifted),
        )
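# Minimal usage sketch for the plotting functions above (hypothetical values: a
# results folder "./results", a routine named "resonator_spectroscopy", qubit 0
# and csv-formatted data). Each function returns a ``plotly.graph_objects.Figure``:
#
#     fig = frequency_flux_msr_phase("./results", "resonator_spectroscopy", 0, "csv")
#     fig.write_html("resonator_flux_q0.html")  # or fig.show() in an interactive session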
\"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = 
f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = 
DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_spec),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # fitting shifted traces\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\n freqrange = np.linspace(\n min(data_shifted.get_values(\"frequency\", \"GHz\")),\n max(data_shifted.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_shifted),\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n 
)\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", 
\"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom 
qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n 
max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, 
format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n 
x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = 
np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n 
cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n 
try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n 
data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n 
x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n 
# -*- coding: utf-8 -*-
"""Plotly report figures for the spectroscopy routines of qibocal."""
import os

import numpy as np
import plotly.graph_objects as go
from plotly.subplots import make_subplots

from qibocal.data import Data, DataUnits
from qibocal.fitting.utils import lorenzian


def frequency_msr_phase__fast_precision(folder, routine, qubit, format):
    # Load the fast sweep, the precision sweep and the fit results; fall back to
    # empty containers so the live report can render before the data exists.
    try:
        data_fast = DataUnits.load_data(folder, routine, format, f"fast_sweep_q{qubit}")
    except:
        data_fast = DataUnits(quantities={"frequency": "Hz"})
    try:
        data_precision = DataUnits.load_data(
            folder, routine, format, f"precision_sweep_q{qubit}"
        )
    except:
        data_precision = DataUnits(quantities={"frequency": "Hz"})
    try:
        data_fit = Data.load_data(folder, routine, format, f"fit_q{qubit}")
    except:
        data_fit = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Scatter(
            x=data_fast.get_values("frequency", "GHz"),
            y=data_fast.get_values("MSR", "uV"),
            name="Fast",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_fast.get_values("frequency", "GHz"),
            y=data_fast.get_values("phase", "rad"),
            name="Fast",
        ),
        row=1,
        col=2,
    )
    fig.add_trace(
        go.Scatter(
            x=data_precision.get_values("frequency", "GHz"),
            y=data_precision.get_values("MSR", "uV"),
            name="Precision",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_precision.get_values("frequency", "GHz"),
            y=data_precision.get_values("phase", "rad"),
            name="Precision",
        ),
        row=1,
        col=2,
    )
    if len(data_fast) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_fast.get_values("frequency", "GHz")),
            max(data_fast.get_values("frequency", "GHz")),
            2 * len(data_fast),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.30,
                showarrow=False,
                text=f"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig


def frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    plot1d_attenuation = 30  # attenuation value to use for 1D frequency vs MSR plot

    fig = go.Figure()
    # index data on a specific attenuation value
    smalldf = data.df[data.get_values("attenuation", "dB") == plot1d_attenuation].copy()
    # split multiple software averages to different datasets
    datasets = []
    while len(smalldf):
        datasets.append(smalldf.drop_duplicates("frequency"))
        smalldf.drop(datasets[-1].index, inplace=True)
        fig.add_trace(
            go.Scatter(
                x=datasets[-1]["frequency"].pint.to("GHz").pint.magnitude,
                y=datasets[-1]["MSR"].pint.to("V").pint.magnitude,
            ),
        )

    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (V)",
    )
    return fig


def frequency_flux_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Current (A)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Current (A)",
    )
    return fig


def frequency_flux_msr_phase__matrix(folder, routine, qubit, format):
    # collect the flux indices that were actually saved to disk
    fluxes = []
    for i in range(25):  # FIXME: 25 is hardcoded
        file = f"{folder}/data/{routine}/data_q{qubit}_f{i}.csv"
        if os.path.exists(file):
            fluxes += [i]

    if len(fluxes) < 1:
        nb = 1
    else:
        nb = len(fluxes)
    fig = make_subplots(
        rows=2,
        cols=nb,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        x_title="Frequency (Hz)",
        y_title="Current (A)",
        shared_xaxes=True,
        shared_yaxes=True,
    )

    for j in fluxes:
        if j == fluxes[-1]:
            showscale = True
        else:
            showscale = False
        data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}_f{j}")
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("MSR", "V"),
                showscale=showscale,
            ),
            row=1,
            col=j,
        )
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("phase", "rad"),
                showscale=showscale,
            ),
            row=2,
            col=j,
        )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
    )
    return fig


def frequency_attenuation_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Attenuation (dB)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Attenuation (dB)",
    )
    return fig


def dispersive_frequency_msr_phase(folder, routine, qubit, formato):
    # Load the bare and shifted spectroscopy data and the corresponding fits;
    # fall back to empty containers if a file is not available yet.
    try:
        data_spec = DataUnits.load_data(folder, routine, formato, f"data_q{qubit}")
    except:
        data_spec = DataUnits(name=f"data_q{qubit}", quantities={"frequency": "Hz"})

    try:
        data_shifted = DataUnits.load_data(
            folder, routine, formato, f"data_shifted_q{qubit}"
        )
    except:
        data_shifted = DataUnits(
            name=f"data_shifted_q{qubit}", quantities={"frequency": "Hz"}
        )

    try:
        data_fit = Data.load_data(folder, routine, formato, f"fit_q{qubit}")
    except:
        data_fit = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    try:
        data_fit_shifted = Data.load_data(
            folder, routine, formato, f"fit_shifted_q{qubit}"
        )
    except:
        data_fit_shifted = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("MSR", "uV"),
            name="Spectroscopy",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("phase", "rad"),
            name="Spectroscopy",
        ),
        row=1,
        col=2,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("MSR", "uV"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=1,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("phase", "rad"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=2,
    )

    # fitting traces
    if len(data_spec) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_spec.get_values("frequency", "GHz")),
            max(data_spec.get_values("frequency", "GHz")),
            2 * len(data_spec),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )

    # fitting shifted traces
    if len(data_shifted) > 0 and len(data_fit_shifted) > 0:
        freqrange = np.linspace(
            min(data_shifted.get_values("frequency", "GHz")),
            max(data_shifted.get_values("frequency", "GHz")),
            2 * len(data_shifted),
        )
        params = [i for i in list(data_fit_shifted.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit_shifted.get_values("popt0"),
                    data_fit_shifted.get_values("popt1"),
                    data_fit_shifted.get_values("popt2"),
                    data_fit_shifted.get_values("popt3"),
                ),
                # Closing of this trace and the final layout are reconstructed to
                # mirror the unshifted fit block above; the source was truncated here.
                name="Fit shifted spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )

    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig
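

# The fit traces above call ``lorenzian(frequency, popt0, popt1, popt2, popt3)``
# imported from qibocal.fitting.utils. A minimal sketch with the same call
# signature is given below purely for illustration; the exact parameterization
# used inside qibocal may differ, so this is an assumption, not the library code.
def lorenzian_sketch(frequency, amplitude, center, sigma, offset):
    """Illustrative four-parameter Lorentzian line shape peaked at ``center``."""
    return amplitude / np.pi * (sigma / ((frequency - center) ** 2 + sigma**2)) + offset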
data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, 
routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n 
fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n 
showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n 
routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, 
f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_spec),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n 
go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n 
showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n 
data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_spec),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # fitting shifted traces\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\n freqrange = np.linspace(\n min(data_shifted.get_values(\"frequency\", \"GHz\")),\n max(data_shifted.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_shifted),\n )\n params = [i for i in list(data_fit_shifted.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit_shifted.get_values(\"popt0\"),\n data_fit_shifted.get_values(\"popt1\"),","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n 
go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n 
x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n 
x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_spec),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # fitting shifted traces\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\n freqrange = np.linspace(\n min(data_shifted.get_values(\"frequency\", \"GHz\")),\n max(data_shifted.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_shifted),\n )\n params = [i for i in list(data_fit_shifted.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots 

def frequency_msr_phase__fast_precision(folder, routine, qubit, format):
    # Load the fast and precision sweeps plus the fit results; fall back to
    # empty containers so the live report can render before data exists.
    try:
        data_fast = DataUnits.load_data(folder, routine, format, f"fast_sweep_q{qubit}")
    except:
        data_fast = DataUnits(quantities={"frequency": "Hz"})
    try:
        data_precision = DataUnits.load_data(
            folder, routine, format, f"precision_sweep_q{qubit}"
        )
    except:
        data_precision = DataUnits(quantities={"frequency": "Hz"})
    try:
        data_fit = Data.load_data(folder, routine, format, f"fit_q{qubit}")
    except:
        data_fit = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Scatter(
            x=data_fast.get_values("frequency", "GHz"),
            y=data_fast.get_values("MSR", "uV"),
            name="Fast",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_fast.get_values("frequency", "GHz"),
            y=data_fast.get_values("phase", "rad"),
            name="Fast",
        ),
        row=1,
        col=2,
    )
    fig.add_trace(
        go.Scatter(
            x=data_precision.get_values("frequency", "GHz"),
            y=data_precision.get_values("MSR", "uV"),
            name="Precision",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_precision.get_values("frequency", "GHz"),
            y=data_precision.get_values("phase", "rad"),
            name="Precision",
        ),
        row=1,
        col=2,
    )
    # Overlay the Lorentzian fit and report the fitted parameters as annotations.
    if len(data_fast) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_fast.get_values("frequency", "GHz")),
            max(data_fast.get_values("frequency", "GHz")),
            2 * len(data_fast),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.30,
                showarrow=False,
                text=f"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig

def frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    plot1d_attenuation = 30  # attenuation value to use for 1D frequency vs MSR plot

    fig = go.Figure()
    # index data on a specific attenuation value
    smalldf = data.df[data.get_values("attenuation", "dB") == plot1d_attenuation].copy()
    # split multiple software averages to different datasets
    datasets = []
    while len(smalldf):
        datasets.append(smalldf.drop_duplicates("frequency"))
        smalldf.drop(datasets[-1].index, inplace=True)
        fig.add_trace(
            go.Scatter(
                x=datasets[-1]["frequency"].pint.to("GHz").pint.magnitude,
                y=datasets[-1]["MSR"].pint.to("V").pint.magnitude,
            ),
        )

    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (V)",
    )
    return fig


def frequency_flux_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Current (A)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Current (A)",
    )
    return fig

def frequency_flux_msr_phase__matrix(folder, routine, qubit, format):
    # Collect the flux indices for which a data file exists.
    fluxes = []
    for i in range(25):  # FIXME: 25 is hardcoded
        file = f"{folder}/data/{routine}/data_q{qubit}_f{i}.csv"
        if os.path.exists(file):
            fluxes += [i]

    if len(fluxes) < 1:
        nb = 1
    else:
        nb = len(fluxes)
    fig = make_subplots(
        rows=2,
        cols=nb,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        x_title="Frequency (GHz)",
        y_title="Current (A)",
        shared_xaxes=True,
        shared_yaxes=True,
    )

    # One column per flux line: MSR heatmap on the first row, phase on the second.
    # Plotly subplot columns are 1-indexed, so map flux indices to columns explicitly.
    for col, j in enumerate(fluxes, start=1):
        showscale = j == fluxes[-1]  # only the last column gets a colorbar
        data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}_f{j}")
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("MSR", "V"),
                showscale=showscale,
            ),
            row=1,
            col=col,
        )
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("phase", "rad"),
                showscale=showscale,
            ),
            row=2,
            col=col,
        )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
    )
    return fig


def frequency_attenuation_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Attenuation (dB)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Attenuation (dB)",
    )
    return fig
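
# Sketch of a possible alternative to the hardcoded ``range(25)`` scan in
# ``frequency_flux_msr_phase__matrix`` above: collect every flux index for
# which a data file exists via ``glob``.  The file layout is the one assumed
# by that function; this helper is illustrative and is not used elsewhere.
def _available_fluxes(folder, routine, qubit):
    import glob
    import re

    pattern = f"{folder}/data/{routine}/data_q{qubit}_f*.csv"
    fluxes = []
    for path in glob.glob(pattern):
        match = re.search(r"_f(\d+)\.csv$", path)
        if match:
            fluxes.append(int(match.group(1)))
    return sorted(fluxes)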
data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = 
DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n 
shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n 
xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n 
showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n 
            y=data_spec.get_values("MSR", "uV"),
            name="Spectroscopy",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("phase", "rad"),
            name="Spectroscopy",
        ),
        row=1,
        col=2,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("MSR", "uV"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=1,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("phase", "rad"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=2,
    )

    # fitting traces
    if len(data_spec) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_spec.get_values("frequency", "GHz")),
            max(data_spec.get_values("frequency", "GHz")),
            2 * len(data_spec),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )

    # fitting shifted traces
    if len(data_shifted) > 0 and len(data_fit_shifted) > 0:
        freqrange = np.linspace(
            min(data_shifted.get_values("frequency", "GHz")),
            max(data_shifted.get_values("frequency", "GHz")),
            2 * len(data_shifted),
        )
        params = [i for i in list(data_fit_shifted.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit_shifted.get_values("popt0"),
                    data_fit_shifted.get_values("popt1"),
                    data_fit_shifted.get_values("popt2"),
                    data_fit_shifted.get_values("popt3"),
                ),
                name="Fit shifted spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        # The source excerpt truncates inside the annotation below; the remaining
        # fields and the closing layout mirror the unshifted branch above.
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.30,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit_shifted.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )

    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig
data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n 
xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # 
``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, 
format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to 
different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n 
col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef 
frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # 
``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n 
showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n 
\\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n\\n # fitting shifted traces\\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\\n freqrange = np.linspace(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n 
row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n 
go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n 
x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n 
y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n 
x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = 
Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n 
fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n 
z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n 
data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n 
data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = 
make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = 
DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n\\n # fitting shifted traces\\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\\n freqrange = np.linspace(\\n min(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = 
DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # 
split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n 
colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n 
name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n\\n # fitting shifted traces\\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\\n freqrange = np.linspace(\\n min(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_shifted),\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n 
y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n 
x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n 
font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", 
\\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n 
quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", 
\\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n 
y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, 
routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n 
row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n 
\\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR 
(uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return 
fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n 
rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n\\n # fitting shifted traces\\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\\n freqrange = np.linspace(\\n min(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_shifted),\\n )\\n params = [i for i in list(data_fit_shifted.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit_shifted.get_values(\\\"popt0\\\"),\\n data_fit_shifted.get_values(\\\"popt1\\\"),\\n data_fit_shifted.get_values(\\\"popt2\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": 
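dispersive_frequency_msr_phase overlays the bare and shifted spectroscopy together with a Lorentzian fit to each, and its annotations report the fitted frequency from the first non-"popt" column of each fit file. A hypothetical helper (not part of qibocal) showing how the dispersive shift could be read off those two fit results under that same assumption:

def dispersive_shift_hz(data_fit, data_fit_shifted):
    """Difference between the shifted and bare fitted frequencies, in Hz.

    Assumes, as the annotation text implies, that the first column not named
    "popt*" in each fit file holds the frequency estimate in Hz.
    """
    label = next(c for c in data_fit.df.keys() if "popt" not in c)
    label_shifted = next(c for c in data_fit_shifted.df.keys() if "popt" not in c)
    return data_fit_shifted.df[label_shifted][0] - data_fit.df[label][0]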
\\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n 
datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n 
x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) 
> 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency 
(GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n 
y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n\\n # fitting shifted traces\\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\\n freqrange = np.linspace(\\n min(data_shifted.get_values(\\\"frequency\\\", 
\\\"GHz\\\")),\\n max(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_shifted),\\n )\\n params = [i for i in list(data_fit_shifted.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit_shifted.get_values(\\\"popt0\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n 
fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n 
showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n 
fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n 
z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, 
f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n 
col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n 
row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in 
list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = 
f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n 
uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n 
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.30,
                showarrow=False,
                text=f"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig


def frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    plot1d_attenuation = 30  # attenuation value to use for 1D frequency vs MSR plot

    fig = go.Figure()
    # index data on a specific attenuation value
    smalldf = data.df[data.get_values("attenuation", "dB") == plot1d_attenuation].copy()
    # split multiple software averages to different datasets
    datasets = []
    while len(smalldf):
        datasets.append(smalldf.drop_duplicates("frequency"))
        smalldf.drop(datasets[-1].index, inplace=True)
        fig.add_trace(
            go.Scatter(
                x=datasets[-1]["frequency"].pint.to("GHz").pint.magnitude,
                y=datasets[-1]["MSR"].pint.to("V").pint.magnitude,
            ),
        )

    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (V)",
    )
    return fig


def frequency_flux_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Current (A)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Current (A)",
    )
    return fig


def frequency_flux_msr_phase__matrix(folder, routine, qubit, format):
    import os  # needed for the file-existence check below; missing from the module-level imports

    fluxes = []
    for i in range(25):  # FIXME: 25 is hardcoded
        file = f"{folder}/data/{routine}/data_q{qubit}_f{i}.csv"
        if os.path.exists(file):
            fluxes += [i]

    if len(fluxes) < 1:
        nb = 1
    else:
        nb = len(fluxes)
    fig = make_subplots(
        rows=2,
        cols=nb,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        x_title="Frequency (Hz)",
        y_title="Current (A)",
        shared_xaxes=True,
        shared_yaxes=True,
    )

    for j in fluxes:
        if j == fluxes[-1]:
            showscale = True
        else:
            showscale = False
        data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}_f{j}")
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("MSR", "V"),
                showscale=showscale,
            ),
            row=1,
            col=j,
        )
        fig.add_trace(
            go.Heatmap(
                x=data.get_values("frequency", "GHz"),
                y=data.get_values("current", "A"),
                z=data.get_values("phase", "rad"),
                showscale=showscale,
            ),
            row=2,
            col=j,
        )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
    )
    return fig


def frequency_attenuation_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("attenuation", "dB"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Attenuation (dB)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Attenuation (dB)",
    )
    return fig


def dispersive_frequency_msr_phase(folder, routine, qubit, formato):

    try:
        data_spec = DataUnits.load_data(folder, routine, formato, f"data_q{qubit}")
    except:
        data_spec = DataUnits(name=f"data_q{qubit}", quantities={"frequency": "Hz"})

    try:
        data_shifted = DataUnits.load_data(
            folder, routine, formato, f"data_shifted_q{qubit}"
        )
    except:
        data_shifted = DataUnits(
            name=f"data_shifted_q{qubit}", quantities={"frequency": "Hz"}
        )

    try:
        data_fit = Data.load_data(folder, routine, formato, f"fit_q{qubit}")
    except:
        data_fit = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    try:
        data_fit_shifted = Data.load_data(
            folder, routine, formato, f"fit_shifted_q{qubit}"
        )
    except:
        data_fit_shifted = Data(
            quantities=[
                "popt0",
                "popt1",
                "popt2",
                "popt3",
                "label1",
                "label2",
            ]
        )

    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("MSR", "uV"),
            name="Spectroscopy",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_spec.get_values("frequency", "GHz"),
            y=data_spec.get_values("phase", "rad"),
            name="Spectroscopy",
        ),
        row=1,
        col=2,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("MSR", "uV"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=1,
    )

    fig.add_trace(
        go.Scatter(
            x=data_shifted.get_values("frequency", "GHz"),
            y=data_shifted.get_values("phase", "rad"),
            name="Shifted Spectroscopy",
        ),
        row=1,
        col=2,
    )

    # fitting traces
    if len(data_spec) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_spec.get_values("frequency", "GHz")),
            max(data_spec.get_values("frequency", "GHz")),
            2 * len(data_spec),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )

    # fitting shifted traces
    if len(data_shifted) > 0 and len(data_fit_shifted) > 0:
        freqrange = np.linspace(
            min(data_shifted.get_values("frequency", "GHz")),
            max(data_shifted.get_values("frequency", "GHz")),
            2 * len(data_shifted),
        )
        params = [i for i in list(data_fit_shifted.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit_shifted.get_values("popt0"),
                    data_fit_shifted.get_values("popt1"),
                    data_fit_shifted.get_values("popt2"),
                    data_fit_shifted.get_values("popt3"),
                ),
                name="Fit shifted spectroscopy",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )

    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig
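For context, here is a minimal sketch of how one of the plotters above could be invoked offline. The module path, the run-folder name, and the "csv" format string are assumptions inferred from the file paths used in frequency_flux_msr_phase__matrix; they are not a documented qibocal entry point.

# Hypothetical offline usage of the plotting helpers above (a sketch, not the
# official qibocal workflow). Module path, folder name and format are assumed.
from qibocal.plots.resonator_spectroscopy import frequency_flux_msr_phase  # assumed module path

folder = "output/resonator_spectroscopy_run"  # hypothetical run folder
fig = frequency_flux_msr_phase(folder, "resonator_flux", qubit=0, format="csv")
fig.write_html("resonator_flux_q0.html")  # Plotly figures can be saved as standalone HTML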
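The dotted "Fit" traces evaluate lorenzian from qibocal.fitting.utils on a dense frequency grid with the four fitted parameters popt0..popt3. The exact parameterization lives in that module; purely to illustrate the call signature, a common Lorentzian-plus-offset form looks like the sketch below (an assumption, not the verbatim qibocal definition).

import numpy as np

def lorenzian(frequency, amplitude, center, sigma, offset):
    # Illustrative Lorentzian with the same four-parameter signature used by the
    # fit overlays above; qibocal.fitting.utils may parameterize it differently.
    return amplitude / np.pi * (sigma / ((frequency - center) ** 2 + sigma**2)) + offset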
max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, 
routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n 
showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n 
)\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n 
data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n 
data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n 
uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency 
(GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import 
make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to 
use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase 
(rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n 
name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n\\n # fitting shifted traces\\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\\n freqrange = np.linspace(\\n min(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_shifted),\\n )\\n params = [i for i in list(data_fit_shifted.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", 
\\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n 
colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, 
DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Scatter(\\n x=data_precision.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_precision.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Precision\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n if len(data_fast) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_fast),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.30,\\n showarrow=False,\\n text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\\n xref=\\\"paper\\\",\\n yref=\\\"paper\\\",\\n )\\n )\\n fig.update_layout(\\n showlegend=True,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (uV)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Phase (rad)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\\n\\n fig = 
go.Figure()\\n # index data on a specific attenuation value\\n smalldf = data.df[data.get_values(\\\"attenuation\\\", \\\"dB\\\") == plot1d_attenuation].copy()\\n # split multiple software averages to different datasets\\n datasets = []\\n while len(smalldf):\\n datasets.append(smalldf.drop_duplicates(\\\"frequency\\\"))\\n smalldf.drop(datasets[-1].index, inplace=True)\\n fig.add_trace(\\n go.Scatter(\\n x=datasets[-1][\\\"frequency\\\"].pint.to(\\\"GHz\\\").pint.magnitude,\\n y=datasets[-1][\\\"MSR\\\"].pint.to(\\\"V\\\").pint.magnitude,\\n ),\\n )\\n\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting,\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"MSR (V)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Current (A)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Current (A)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n 
go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n colorbar_x=0.45,\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n colorbar_x=1.0,\\n ),\\n row=1,\\n col=2,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n xaxis_title=\\\"Frequency (GHz)\\\",\\n yaxis_title=\\\"Attenuation (dB)\\\",\\n xaxis2_title=\\\"Frequency (GHz)\\\",\\n yaxis2_title=\\\"Attenuation (dB)\\\",\\n )\\n return fig\\n\\n\\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\\n fluxes = []\\n for i in range(25): # FIXME: 25 is hardcoded\\n file = f\\\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\\\"\\n if os.path.exists(file):\\n fluxes += [i]\\n\\n if len(fluxes) < 1:\\n nb = 1\\n else:\\n nb = len(fluxes)\\n fig = make_subplots(\\n rows=2,\\n cols=nb,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n x_title=\\\"Frequency (Hz)\\\",\\n y_title=\\\"Current (A)\\\",\\n shared_xaxes=True,\\n shared_yaxes=True,\\n )\\n\\n for j in fluxes:\\n if j == fluxes[-1]:\\n showscale = True\\n else:\\n showscale = False\\n data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\\n showscale=showscale,\\n ),\\n row=1,\\n col=j,\\n )\\n fig.add_trace(\\n go.Heatmap(\\n x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data.get_values(\\\"current\\\", \\\"A\\\"),\\n z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\\n showscale=showscale,\\n ),\\n row=2,\\n col=j,\\n )\\n fig.update_layout(\\n showlegend=False,\\n uirevision=\\\"0\\\", # ``uirevision`` allows zooming while live plotting\\n )\\n return fig\\n\\n\\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\\n\\n try:\\n data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\\n except:\\n data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n\\n try:\\n data_shifted = DataUnits.load_data(\\n folder, routine, formato, f\\\"data_shifted_q{qubit}\\\"\\n )\\n except:\\n data_shifted = DataUnits(\\n name=f\\\"data_shifted_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"}\\n )\\n\\n try:\\n data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n try:\\n data_fit_shifted = Data.load_data(\\n folder, routine, formato, f\\\"fit_shifted_q{qubit}\\\"\\n )\\n except:\\n data_fit_shifted = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n 
fig.add_trace(\\n go.Scatter(\\n x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=1,\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\\n name=\\\"Shifted Spectroscopy\\\",\\n ),\\n row=1,\\n col=2,\\n )\\n\\n # fitting traces\\n if len(data_spec) > 0 and len(data_fit) > 0:\\n freqrange = np.linspace(\\n min(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n max(data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\")),\\n 2 * len(data_spec),\\n )\\n params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\\n fig.add_trace(\\n go.Scatter(\\n x=freqrange,\\n y=lorenzian(\\n freqrange,\\n data_fit.get_values(\\\"popt0\\\"),\\n data_fit.get_values(\\\"popt1\\\"),\\n data_fit.get_values(\\\"popt2\\\"),\\n data_fit.get_values(\\\"popt3\\\"),\\n ),\\n name=\\\"Fit spectroscopy\\\",\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n ),\\n row=1,\\n col=1,\\n )\\n fig.add_annotation(\\n dict(\\n font=dict(color=\\\"black\\\", size=12),\\n x=0,\\n y=-0.25,\\n showarrow=False,\\n text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\\n textangle=0,\\n xanchor=\\\"left\\\",\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nimport plotly.graph_objects as go\\nfrom plotly.subplots import make_subplots\\n\\nfrom qibocal.data import Data, DataUnits\\nfrom qibocal.fitting.utils import lorenzian\\n\\n\\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\\n try:\\n data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\\n except:\\n data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_precision = DataUnits.load_data(\\n folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\\n )\\n except:\\n data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\\n try:\\n data_fit = Data.load_data(folder, routine, format, f\\\"fit_q{qubit}\\\")\\n except:\\n data_fit = Data(\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"label1\\\",\\n \\\"label2\\\",\\n ]\\n )\\n\\n fig = make_subplots(\\n rows=1,\\n cols=2,\\n horizontal_spacing=0.1,\\n vertical_spacing=0.1,\\n subplot_titles=(\\n \\\"MSR (V)\\\",\\n \\\"phase (rad)\\\",\\n ),\\n )\\n\\n fig.add_trace(\\n go.Scatter(\\n x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\\n y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\\n name=\\\"Fast\\\",\\n ),\\n row=1,\\n 
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_fast.get_values("frequency", "GHz"),
            y=data_fast.get_values("phase", "rad"),
            name="Fast",
        ),
        row=1,
        col=2,
    )
    fig.add_trace(
        go.Scatter(
            x=data_precision.get_values("frequency", "GHz"),
            y=data_precision.get_values("MSR", "uV"),
            name="Precision",
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Scatter(
            x=data_precision.get_values("frequency", "GHz"),
            y=data_precision.get_values("phase", "rad"),
            name="Precision",
        ),
        row=1,
        col=2,
    )
    if len(data_fast) > 0 and len(data_fit) > 0:
        freqrange = np.linspace(
            min(data_fast.get_values("frequency", "GHz")),
            max(data_fast.get_values("frequency", "GHz")),
            2 * len(data_fast),
        )
        params = [i for i in list(data_fit.df.keys()) if "popt" not in i]
        fig.add_trace(
            go.Scatter(
                x=freqrange,
                y=lorenzian(
                    freqrange,
                    data_fit.get_values("popt0"),
                    data_fit.get_values("popt1"),
                    data_fit.get_values("popt2"),
                    data_fit.get_values("popt3"),
                ),
                name="Fit",
                line=go.scatter.Line(dash="dot"),
            ),
            row=1,
            col=1,
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.25,
                showarrow=False,
                text=f"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
        fig.add_annotation(
            dict(
                font=dict(color="black", size=12),
                x=0,
                y=-0.30,
                showarrow=False,
                text=f"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.",
                textangle=0,
                xanchor="left",
                xref="paper",
                yref="paper",
            )
        )
    fig.update_layout(
        showlegend=True,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (uV)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Phase (rad)",
    )
    return fig


def frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    plot1d_attenuation = 30  # attenuation value to use for 1D frequency vs MSR plot

    fig = go.Figure()
    # index data on a specific attenuation value
    smalldf = data.df[data.get_values("attenuation", "dB") == plot1d_attenuation].copy()
    # split multiple software averages to different datasets
    datasets = []
    while len(smalldf):
        datasets.append(smalldf.drop_duplicates("frequency"))
        smalldf.drop(datasets[-1].index, inplace=True)
        fig.add_trace(
            go.Scatter(
                x=datasets[-1]["frequency"].pint.to("GHz").pint.magnitude,
                y=datasets[-1]["MSR"].pint.to("V").pint.magnitude,
            ),
        )

    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="MSR (V)",
    )
    return fig


def frequency_flux_msr_phase(folder, routine, qubit, format):
    data = DataUnits.load_data(folder, routine, format, f"data_q{qubit}")
    fig = make_subplots(
        rows=1,
        cols=2,
        horizontal_spacing=0.1,
        vertical_spacing=0.1,
        subplot_titles=(
            "MSR (V)",
            "phase (rad)",
        ),
    )

    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("MSR", "V"),
            colorbar_x=0.45,
        ),
        row=1,
        col=1,
    )
    fig.add_trace(
        go.Heatmap(
            x=data.get_values("frequency", "GHz"),
            y=data.get_values("current", "A"),
            z=data.get_values("phase", "rad"),
            colorbar_x=1.0,
        ),
        row=1,
        col=2,
    )
    fig.update_layout(
        showlegend=False,
        uirevision="0",  # ``uirevision`` allows zooming while live plotting
        xaxis_title="Frequency (GHz)",
        yaxis_title="Current (A)",
        xaxis2_title="Frequency (GHz)",
        yaxis2_title="Current (A)",
    )
    return fig
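

# Usage sketch (illustrative; the folder and routine names are hypothetical
# placeholders, not paths taken from the sources above). Each plotting helper
# takes the report folder, the routine name, the qubit id and the data format,
# loads the stored CSV data and returns a plotly figure that can be shown or
# written to an HTML file.
if __name__ == "__main__":
    fig = frequency_msr_phase__fast_precision(
        folder="reports/2022-01-01-qpu",  # hypothetical output folder
        routine="resonator_spectroscopy",  # routine whose data was saved
        qubit=0,
        format="csv",
    )
    fig.show()  # or fig.write_html("resonator_spectroscopy_q0.html")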
data_fit_shifted.get_values(\"popt0\"),"," y=lorenzian("," data_fit_shifted = Data.load_data("," text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\","," data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")"," text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\","," z=data.get_values(\"MSR\", \"V\"),"," folder, routine, format, f\"precision_sweep_q{qubit}\""," z=data.get_values(\"phase\", \"rad\"),"," data_shifted = DataUnits.load_data("," x=data_spec.get_values(\"frequency\", \"GHz\"),"," text=f\"The estimated shifted {params[0]} is {data_fit_shifted.df[params[0]][0]:.1f} Hz.\","," smalldf.drop(datasets[-1].index, inplace=True)"," )",""," go.Scatter("," fig = make_subplots("," xref=\"paper\","," )"," row=1,"],"string":"[\n \" data_precision = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\",\n \" data_fit = Data(\",\n \" y=data_spec.get_values(\\\"MSR\\\", \\\"uV\\\"),\",\n \" data_precision = DataUnits.load_data(\",\n \" min(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\",\n \" data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}\\\")\",\n \" data_shifted = DataUnits(\",\n \" x=data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \" y=data_fast.get_values(\\\"phase\\\", \\\"rad\\\"),\",\n \" y=data_shifted.get_values(\\\"MSR\\\", \\\"uV\\\"),\",\n \" max(data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\")),\",\n \" params = [i for i in list(data_fit_shifted.df.keys()) if \\\"popt\\\" not in i]\",\n \" data = DataUnits.load_data(folder, routine, format, f\\\"data_q{qubit}_f{j}\\\")\",\n \" min(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\",\n \" data_fit.get_values(\\\"popt3\\\"),\",\n \" data_spec = DataUnits.load_data(folder, routine, formato, f\\\"data_q{qubit}\\\")\",\n \" x=data_shifted.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \" data_fast = DataUnits.load_data(folder, routine, format, f\\\"fast_sweep_q{qubit}\\\")\",\n \" data_fit.get_values(\\\"popt1\\\"),\",\n \" y=data_spec.get_values(\\\"phase\\\", \\\"rad\\\"),\",\n \" max(data_fast.get_values(\\\"frequency\\\", \\\"GHz\\\")),\",\n \" y=data_precision.get_values(\\\"MSR\\\", \\\"uV\\\"),\",\n \" y=data_fast.get_values(\\\"MSR\\\", \\\"uV\\\"),\",\n \" data_fit_shifted.get_values(\\\"popt3\\\"),\",\n \" data_fast = DataUnits(quantities={\\\"frequency\\\": \\\"Hz\\\"})\",\n \" y=data.get_values(\\\"current\\\", \\\"A\\\"),\",\n \" data_fit_shifted.get_values(\\\"popt1\\\"),\",\n \" y=data.get_values(\\\"attenuation\\\", \\\"dB\\\"),\",\n \" data_spec = DataUnits(name=f\\\"data_q{qubit}\\\", quantities={\\\"frequency\\\": \\\"Hz\\\"})\",\n \" params = [i for i in list(data_fit.df.keys()) if \\\"popt\\\" not in i]\",\n \" x=data.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \" data_fit_shifted = Data(\",\n \" y=data_shifted.get_values(\\\"phase\\\", \\\"rad\\\"),\",\n \" data_fit.get_values(\\\"popt2\\\"),\",\n \" data_fit_shifted.get_values(\\\"popt2\\\"),\",\n \" data_fit_shifted.get_values(\\\"popt0\\\"),\",\n \" y=lorenzian(\",\n \" data_fit_shifted = Data.load_data(\",\n \" text=f\\\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\\\",\",\n \" data_fit = Data.load_data(folder, routine, formato, f\\\"fit_q{qubit}\\\")\",\n \" text=f\\\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\\\",\",\n \" z=data.get_values(\\\"MSR\\\", \\\"V\\\"),\",\n \" folder, routine, format, f\\\"precision_sweep_q{qubit}\\\"\",\n \" z=data.get_values(\\\"phase\\\", \\\"rad\\\"),\",\n \" 
data_shifted = DataUnits.load_data(\",\n \" x=data_spec.get_values(\\\"frequency\\\", \\\"GHz\\\"),\",\n \" text=f\\\"The estimated shifted {params[0]} is {data_fit_shifted.df[params[0]][0]:.1f} Hz.\\\",\",\n \" smalldf.drop(datasets[-1].index, inplace=True)\",\n \" )\",\n \"\",\n \" go.Scatter(\",\n \" fig = make_subplots(\",\n \" xref=\\\"paper\\\",\",\n \" )\",\n \" row=1,\"\n]"},"metainfo_separator":{"kind":"string","value":" METASEP\n"}}},{"rowIdx":208,"cells":{"repo_id":{"kind":"number","value":16,"string":"16"},"repo_name":{"kind":"string","value":"qiboteam__qibocal"},"project_context":{"kind":"string","value":"qiboteam__qibocal METASEP\n\nsrc/qibocal/calibrations/protocols/test.py METASEP\n# -*- coding: utf-8 -*-\nfrom qibo import gates, models\n\nfrom qibocal.data import Data\n\n\ndef test(\n platform,\n qubit: list,\n nshots,\n points=1,\n):\n data = Data(\"test\", quantities=[\"nshots\", \"probabilities\"])\n nqubits = len(qubit)\n circuit = models.Circuit(nqubits)\n circuit.add(gates.H(qubit[0]))\n circuit.add(gates.H(qubit[1]))\n # circuit.add(gates.H(1))\n circuit.add(gates.M(*qubit))\n execution = circuit(nshots=nshots)\n\n data.add({\"nshots\": nshots, \"probabilities\": execution.probabilities()})\n yield data\n\nsrc/qibocal/calibrations/protocols/__init__.py METASEP\n\nsrc/qibocal/calibrations/characterization/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\n\n\ndef variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n):\n \"\"\"Helper function for sweeps.\"\"\"\n return np.concatenate(\n (\n np.arange(-lowres_width, -highres_width, lowres_step),\n np.arange(-highres_width, highres_width, highres_step),\n np.arange(highres_width, lowres_width, lowres_step),\n )\n )\n\nsrc/qibocal/calibrations/characterization/t1.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import t1_fit\n\n\n@plot(\"MSR vs Time\", plots.t1_time_msr_phase)\ndef t1(\n platform: AbstractPlatform,\n qubit: int,\n delay_before_readout_start,\n delay_before_readout_end,\n delay_before_readout_step,\n software_averages,\n points=10,\n):\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n ro_wait_range = np.arange(\n delay_before_readout_start, delay_before_readout_end, delay_before_readout_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n count = 0\n for _ in range(software_averages):\n for wait in ro_wait_range:\n if count % points == 0 and count > 0:\n yield data\n yield t1_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"t1\"],\n )\n ro_pulse.start = qd_pulse.duration + wait\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n 
\"phase[rad]\": phase,\n \"Time[ns]\": wait,\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qibocal/calibrations/characterization/resonator_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.calibrations.characterization.utils import variable_resolution_scanrange\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef resonator_spectroscopy(\n platform: AbstractPlatform,\n qubit: int,\n lowres_width,\n lowres_step,\n highres_width,\n highres_step,\n precision_width,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n )\n + resonator_frequency\n )\n fast_sweep_data = Dataset(\n name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield fast_sweep_data\n yield lorentzian_fit(\n fast_sweep_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n fast_sweep_data.add(results)\n count += 1\n yield fast_sweep_data\n\n # FIXME: have live ploting work for multiple datasets saved\n\n if platform.resonator_type == \"3D\":\n resonator_frequency = fast_sweep_data.df.frequency[\n fast_sweep_data.df.MSR.index[fast_sweep_data.df.MSR.argmax()]\n ].magnitude\n avg_voltage = (\n np.mean(fast_sweep_data.df.MSR.values[: (lowres_width // lowres_step)])\n * 1e6\n )\n else:\n resonator_frequency = fast_sweep_data.df.frequency[\n fast_sweep_data.df.MSR.index[fast_sweep_data.df.MSR.argmin()]\n ].magnitude\n avg_voltage = (\n np.mean(fast_sweep_data.df.MSR.values[: (lowres_width // lowres_step)])\n * 1e6\n )\n\n precision_sweep__data = Dataset(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(-precision_width, precision_width, precision_step)\n + resonator_frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield precision_sweep__data\n yield lorentzian_fit(\n fast_sweep_data + precision_sweep__data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n precision_sweep__data.add(results)\n count += 1\n yield 
precision_sweep__data\n\n\n@plot(\"Frequency vs Attenuation\", plots.frequency_attenuation_msr_phase)\n@plot(\"MSR vs Frequency\", plots.frequency_attenuation_msr_phase__cut)\ndef resonator_punchout(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n min_att,\n max_att,\n step_att,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"}\n )\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence = PulseSequence()\n sequence.add(ro_pulse)\n\n # TODO: move this explicit instruction to the platform\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step)\n + resonator_frequency\n - (freq_width / 4)\n )\n attenuation_range = np.flip(np.arange(min_att, max_att, step_att))\n count = 0\n for _ in range(software_averages):\n for att in attenuation_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n # TODO: move these explicit instructions to the platform\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.ro_port[qubit].attenuation = att\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr * (np.exp(att / 10)),\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"attenuation[dB]\": att,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Flux Current\", plots.frequency_flux_msr_phase)\ndef resonator_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline=0,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n # TODO: automatically extract the sweet spot current\n # TODO: add a method to generate the matrix\n\n\n@plot(\"MSR row 1 and Phase row 2\", plots.frequency_flux_msr_phase__matrix)\ndef resonator_spectroscopy_flux_matrix(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_min,\n current_max,\n current_step,\n fluxlines,\n software_averages,\n 
points=10,\n):\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = np.arange(current_min, current_max, current_step)\n\n count = 0\n for fluxline in fluxlines:\n fluxline = int(fluxline)\n print(fluxline)\n data = Dataset(\n name=f\"data_q{qubit}_f{fluxline}\",\n quantities={\"frequency\": \"Hz\", \"current\": \"A\"},\n )\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.dispersive_frequency_msr_phase)\ndef dispersive_shift(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n\n data_spec = Dataset(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield data_spec\n yield lorentzian_fit(\n data_spec,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data_spec.add(results)\n count += 1\n yield data_spec\n\n # Shifted Spectroscopy\n sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.finish)\n sequence.add(RX_pulse)\n sequence.add(ro_pulse)\n\n data_shifted = Dataset(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield data_shifted\n yield lorentzian_fit(\n data_spec,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n fit_file_name=\"fit_shifted\",\n )\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data_shifted.add(results)\n count += 1\n yield 
data_shifted\n\nsrc/qibocal/calibrations/characterization/ramsey.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import ramsey_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey_frequency_detuned(\n platform: AbstractPlatform,\n qubit: int,\n t_start,\n t_end,\n t_step,\n n_osc,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n runcard_qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n runcard_T2 = platform.characterization[\"single_qubit\"][qubit][\"T2\"]\n intermediate_freq = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n\n current_qubit_freq = runcard_qubit_freq\n current_T2 = runcard_T2\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n t_end = np.array(t_end)\n for t_max in t_end:\n count = 0\n platform.qd_port[qubit].lo_frequency = current_qubit_freq - intermediate_freq\n offset_freq = n_osc / t_max * sampling_rate # Hz\n t_range = np.arange(t_start, t_max, t_step)\n for wait in t_range:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n RX90_pulse2.relative_phase = (\n (RX90_pulse2.start / sampling_rate) * (2 * np.pi) * (-offset_freq)\n )\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": t_max,\n }\n data.add(results)\n count += 1\n\n # # Fitting\n data_fit = ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n\n new_t2 = data_fit.get_values(\"t2\")\n corrected_qubit_freq = data_fit.get_values(\"corrected_qubit_frequency\")\n\n # if ((new_t2 * 3.5) > t_max):\n if (new_t2 > current_T2).bool() and len(t_end) > 1:\n current_qubit_freq = int(corrected_qubit_freq)\n current_T2 = new_t2\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"}\n )\n else:\n corrected_qubit_freq = int(current_qubit_freq)\n new_t2 = current_T2\n break\n\n yield data\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef 
ramsey(\n platform: AbstractPlatform,\n qubit: int,\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n waits = np.arange(\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n count = 0\n for _ in range(software_averages):\n for wait in waits:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=0,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": np.array(delay_between_pulses_end),\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qibocal/calibrations/characterization/rabi_oscillations.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n 
data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit: int,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"gain[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_gain\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Amplitude\", plots.amplitude_msr_phase)\ndef rabi_pulse_amplitude(\n platform,\n qubit: int,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = 
platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR vs length and amplitude\", plots.duration_amplitude_msr_phase)\ndef rabi_pulse_length_and_amplitude(\n platform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"duration\": \"ns\", \"amplitude\": \"dimensionless\"},\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0:\n yield data\n 
msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qibocal/calibrations/characterization/qubit_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef qubit_spectroscopy(\n platform: AbstractPlatform,\n qubit: int,\n fast_start,\n fast_end,\n fast_step,\n precision_start,\n precision_end,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n freqrange = np.arange(fast_start, fast_end, fast_step) + qubit_frequency\n\n data = Dataset(quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"})\n\n # FIXME: Waiting for Qblox platform to take care of that\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n\n data = Dataset(name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield data\n yield lorentzian_fit(\n data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data.add(results)\n count += 1\n yield data\n\n if platform.resonator_type == \"3D\":\n qubit_frequency = data.df.frequency[\n data.df.MSR.index[data.df.MSR.argmin()]\n ].magnitude\n avg_voltage = (\n np.mean(data.df.MSR.values[: ((fast_end - fast_start) // fast_step)]) * 1e6\n )\n else:\n qubit_frequency = data.df.frequency[\n data.df.MSR.index[data.df.MSR.argmax()]\n ].magnitude\n avg_voltage = (\n np.mean(data.df.MSR.values[: ((fast_end - fast_start) // fast_step)]) * 1e6\n )\n\n prec_data = Dataset(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(precision_start, precision_end, precision_step) + qubit_frequency\n )\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield prec_data\n yield lorentzian_fit(\n data + prec_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n 
\"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n prec_data.add(results)\n count += 1\n yield prec_data\n # TODO: Estimate avg_voltage correctly\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_flux_msr_phase)\ndef qubit_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = np.arange(-freq_width, freq_width, freq_step) + qubit_frequency\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qibocal/calibrations/characterization/flipping.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import flipping_fit\n\n\n@plot(\"MSR vs Flips\", plots.flips_msr_phase)\ndef flipping(\n platform: AbstractPlatform,\n qubit: int,\n niter,\n step,\n points=10,\n):\n platform.reload_settings()\n pi_pulse_amplitude = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"amplitude\"\n ]\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"flips\": \"dimensionless\"})\n\n sequence = PulseSequence()\n RX90_pulse = platform.create_RX90_pulse(qubit, start=0)\n\n count = 0\n # repeat N iter times\n for n in range(0, niter, step):\n if count % points == 0 and count > 0:\n yield data\n yield flipping_fit(\n data,\n x=\"flips[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n niter=niter,\n pi_pulse_amplitude=pi_pulse_amplitude,\n labels=[\"amplitude_delta\", \"corrected_amplitude\"],\n )\n sequence.add(RX90_pulse)\n # execute sequence RX(pi/2) - [RX(pi) - RX(pi)] from 0...n times - RO\n start1 = RX90_pulse.duration\n for j in range(n):\n RX_pulse1 = platform.create_RX_pulse(qubit, start=start1)\n start2 = start1 + RX_pulse1.duration\n RX_pulse2 = platform.create_RX_pulse(qubit, start=start2)\n sequence.add(RX_pulse1)\n sequence.add(RX_pulse2)\n start1 = start2 + RX_pulse2.duration\n\n # add ro pulse at the end of the sequence\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=start1)\n sequence.add(ro_pulse)\n\n msr, phase, i, q = 
platform.execute_pulse_sequence(sequence)[ro_pulse.serial]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"flips[dimensionless]\": np.array(n),\n }\n data.add(results)\n count += 1\n sequence = PulseSequence()\n\n yield data\n\nsrc/qibocal/calibrations/characterization/calibrate_qubit_states.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\n\n\n@plot(\"exc vs gnd\", plots.exc_gnd)\ndef calibrate_qubit_states_binning(\n platform: AbstractPlatform,\n qubit: int,\n niter,\n points=10,\n):\n platform.reload_settings()\n platform.qrm[qubit].ports[\n \"i1\"\n ].hardware_demod_en = True # binning only works with hardware demodulation enabled\n # create exc sequence\n exc_sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.duration)\n exc_sequence.add(RX_pulse)\n exc_sequence.add(ro_pulse)\n data_exc = Dataset(\n name=f\"data_exc_q{qubit}\", quantities={\"iteration\": \"dimensionless\"}\n )\n shots_results = platform.execute_pulse_sequence(exc_sequence, nshots=niter)[\n \"shots\"\n ][ro_pulse.serial]\n for n in np.arange(niter):\n msr, phase, i, q = shots_results[n]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"iteration[dimensionless]\": n,\n }\n data_exc.add(results)\n yield data_exc\n\n gnd_sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n gnd_sequence.add(ro_pulse)\n\n data_gnd = Dataset(\n name=f\"data_gnd_q{qubit}\", quantities={\"iteration\": \"dimensionless\"}\n )\n\n shots_results = platform.execute_pulse_sequence(gnd_sequence, nshots=niter)[\n \"shots\"\n ][ro_pulse.serial]\n for n in np.arange(niter):\n msr, phase, i, q = shots_results[n]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"iteration[dimensionless]\": n,\n }\n data_gnd.add(results)\n yield data_gnd\n\nsrc/qibocal/calibrations/characterization/allXY.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import Dataset\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit: int,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n 
platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit: int,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n platform.reload_settings()\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n if platform.resonator_type == \"3D\":\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state0_voltage - state1_voltage\n )\n prob = (2 * prob) - 1\n\n else:\n prob = np.abs(msr * 1e6 - state1_voltage) / (\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": np.array(gateNumber),\n \"beta_param[dimensionless]\": np.array(beta_param),\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit: int,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"})\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data\n yield drag_tunning_fit(\n data,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n 
start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr1, i1, q1, phase1 = platform.execute_pulse_sequence(seq1, nshots=1024)[\n ro_pulse.serial\n ]\n\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=RY_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi) - Rx(pi/2) - Ro\n seq2 = PulseSequence()\n seq2.add(RY_drag_pulse)\n seq2.add(RX90_drag_pulse)\n seq2.add(ro_pulse)\n msr2, phase2, i2, q2 = platform.execute_pulse_sequence(seq2, nshots=1024)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr1 - msr2,\n \"i[V]\": i1 - i2,\n \"q[V]\": q1 - q2,\n \"phase[deg]\": phase1 - phase2,\n \"beta_param[dimensionless]\": beta_param,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\ndef _get_sequence_from_gate_pair(platform, gates, qubit, beta_param):\n sampling_rate = platform.sampling_rate\n pulse_frequency = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX90_pulse)\n\n if gate == \"RY(pi)\":\n # print(\"Transforming to sequence RY(pi) gate\")\n if beta_param == None:\n RY_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY_pulse)\n\n if gate == \"RY(pi/2)\":\n # print(\"Transforming to sequence RY(pi/2) gate\")\n if beta_param == None:\n RY90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY90_pulse)\n\n sequenceDuration = sequenceDuration + pulse_duration\n pulse_start = pulse_duration\n\n # RO pulse starting just after pair of gates\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration + 4)\n return sequence, ro_pulse\n\nsrc/qibocal/calibrations/characterization/__init__.py METASEP\n\ndoc/source/conf.py METASEP\n# -*- coding: utf-8 -*-\n# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. 
For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\nfrom recommonmark.transform import AutoStructify\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nimport qcvv\n\n# -- Project information -----------------------------------------------------\n\nproject = \"qcvv\"\ncopyright = \"2022, The Qibo team\"\nauthor = \"The Qibo team\"\n\n# The full version, including alpha/beta/rc tags\nrelease = qcvv.__version__\n\n\n# -- General configuration ---------------------------------------------------\n\n# https://stackoverflow.com/questions/56336234/build-fail-sphinx-error-contents-rst-not-found\nmaster_doc = \"index\"\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.doctest\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"recommonmark\",\n \"sphinx_markdown_tables\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# Markdown configuration\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\nsource_suffix = {\".rst\": \"restructuredtext\", \".txt\": \"markdown\", \".md\": \"markdown\"}\n\nautosectionlabel_prefix_document = True\n# Allow to embed rst syntax in markdown files.\nenable_eval_rst = True\n\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = []\n\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx_rtd_theme\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = []\n\n# -- Intersphinx -------------------------------------------------------------\n\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\n\n# -- Autodoc ------------------------------------------------------------------\n#\nautodoc_member_order = \"bysource\"\n\n# Adapted this from\n# https://github.com/readthedocs/recommonmark/blob/ddd56e7717e9745f11300059e4268e204138a6b1/docs/conf.py\n# app setup hook\ndef setup(app):\n app.add_config_value(\"recommonmark_config\", {\"enable_eval_rst\": True}, True)\n app.add_transform(AutoStructify)\n\nsrc/qibocal/web/server.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nimport yaml\nfrom flask import Flask, render_template\nfrom qcvv.cli.builders import ReportBuilder\n\nfrom qcvv import __version__\n\nserver = Flask(__name__)\n\n\n@server.route(\"/\")\n@server.route(\"/data/\")\ndef page(path=None):\n folders = [\n folder\n for folder in reversed(sorted(os.listdir(os.getcwd())))\n if os.path.isdir(folder) and \"meta.yml\" in os.listdir(folder)\n ]\n\n report = None\n if path is not None:\n try:\n report = ReportBuilder(path)\n except (FileNotFoundError, TypeError):\n pass\n\n return render_template(\n \"template.html\",\n version=__version__,\n folders=folders,\n report=report,\n )\n\nsrc/qibocal/web/report.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nfrom jinja2 import Environment, FileSystemLoader\nfrom qcvv.cli.builders import ReportBuilder\n\nfrom qcvv import __version__\n\n\ndef create_report(path):\n \"\"\"Creates an HTML report for the data in the given path.\"\"\"\n filepath = pathlib.Path(__file__)\n\n with open(os.path.join(filepath.with_name(\"static\"), \"styles.css\"), \"r\") as file:\n css_styles = f\"\"\n\n report = ReportBuilder(path)\n env = Environment(loader=FileSystemLoader(filepath.with_name(\"templates\")))\n template = env.get_template(\"template.html\")\n\n html = template.render(\n is_static=True,\n css_styles=css_styles,\n version=__version__,\n report=report,\n )\n\n with open(os.path.join(path, \"index.html\"), \"w\") as file:\n file.write(html)\n\nsrc/qibocal/web/app.py METASEP\n# -*- coding: utf-8 -*-\nimport os\n\nimport pandas as pd\nimport yaml\nfrom dash import Dash, Input, Output, dcc, html\nfrom qcvv.data import Dataset\nfrom qcvv.web.server import server\n\nfrom qcvv import plots\n\nDataset() # dummy dataset call to suppress ``pint[V]`` error\n\napp = Dash(\n server=server,\n suppress_callback_exceptions=True,\n)\n\napp.layout = html.Div(\n [\n dcc.Location(id=\"url\", refresh=False),\n dcc.Graph(id=\"graph\", figure={}),\n dcc.Interval(\n id=\"interval\",\n # TODO: Perhaps the user should be allowed to change the refresh rate\n interval=1000,\n n_intervals=0,\n disabled=False,\n ),\n ]\n)\n\n\n@app.callback(\n Output(\"graph\", \"figure\"),\n Input(\"interval\", \"n_intervals\"),\n Input(\"graph\", \"figure\"),\n Input(\"url\", \"pathname\"),\n)\ndef get_graph(n, current_figure, url):\n method, folder, routine, qubit, format = url.split(os.sep)[2:]\n try:\n # data = Dataset.load_data(folder, routine, format, \"precision_sweep\")\n # with open(f\"{folder}/platform.yml\", \"r\") as f:\n # nqubits = yaml.safe_load(f)[\"nqubits\"]\n # if len(data) > 2:\n # params, fit = resonator_spectroscopy_fit(folder, format, nqubits)\n # else:\n # params, fit = None, None\n # return getattr(plots.resonator_spectroscopy, 
method)(data, params, fit)\n\n # # FIXME: Temporarily hardcode the plotting method to test\n # # multiple routines with different names in one folder\n # # should be changed to:\n # # return getattr(getattr(plots, routine), method)(data)\n\n return getattr(plots, method)(folder, routine, qubit, format)\n except (FileNotFoundError, pd.errors.EmptyDataError):\n return current_figure\n\nsrc/qibocal/web/__init__.py METASEP\n\nsrc/qibocal/tests/test_data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Some tests for the Dataset class\"\"\"\nimport tempfile\n\nimport numpy as np\nimport pytest\nfrom pint import DimensionalityError, UndefinedUnitError\n\nfrom qibocal.data import Dataset\n\n\ndef random_dataset(length):\n data = Dataset()\n for _ in range(length):\n msr, i, q, phase = np.random.rand(len(data.df.columns))\n data.add({\"MSR[V]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n return data\n\n\ndef test_data_initialization():\n \"\"\"Test Dataset constructor\"\"\"\n data = Dataset()\n assert len(data.df.columns) == 4\n assert list(data.df.columns) == [\"MSR\", \"i\", \"q\", \"phase\"]\n\n data1 = Dataset(quantities={\"attenuation\": \"dB\"})\n assert len(data1.df.columns) == 5\n assert list(data1.df.columns) == [\"attenuation\", \"MSR\", \"i\", \"q\", \"phase\"]\n\n\ndef test_units():\n \"\"\"Test units of measure in Dataset\"\"\"\n data = Dataset()\n assert data.df.MSR.values.units == \"volt\"\n\n data1 = Dataset(quantities={\"frequency\": \"Hz\"})\n assert data1.df.frequency.values.units == \"hertz\"\n\n with pytest.raises(UndefinedUnitError):\n data2 = Dataset(quantities={\"fake_unit\": \"fake\"})\n\n\ndef test_add():\n \"\"\"Test add method of Dataset\"\"\"\n data = random_dataset(5)\n assert len(data) == 5\n\n data1 = Dataset(quantities={\"attenuation\": \"dB\"})\n msr, i, q, phase, att = np.random.rand(len(data1.df.columns))\n data1.add(\n {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"attenuation[dB]\": att,\n }\n )\n assert len(data1) == 1\n\n data1.add(\n {\n \"MSR[V]\": 0,\n \"i[V]\": 0.0,\n \"q[V]\": 0.0,\n \"phase[deg]\": 0,\n \"attenuation[dB]\": 1,\n }\n )\n assert len(data1) == 2\n\n data2 = Dataset()\n msr, i, q, phase = np.random.rand(len(data2.df.columns))\n with pytest.raises(DimensionalityError):\n data2.add({\"MSR[dB]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n with pytest.raises(UndefinedUnitError):\n data2.add({\"MSR[test]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\nsrc/qibocal/plots/scatters.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, Dataset\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, rabi, ramsey\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = Dataset.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = Dataset(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = Dataset.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = Dataset(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n 
subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.df[\"fit_amplitude\"][0],\n data_fit.df[\"fit_center\"][0],\n data_fit.df[\"fit_sigma\"][0],\n data_fit.df[\"fit_offset\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\n# For Rabi oscillations\ndef time_msr_phase(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"Time\": \"ns\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n 
)\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"Time\", \"ns\")),\n max(data.get_values(\"Time\", \"ns\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n # add annotation for label[0] -> pi_pulse_duration\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n # add annotation for label[0] -> rabi_oscillations_pi_pulse_max_voltage\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # add annotation for label[0] -> rabi_oscillations_pi_pulse_max_voltage\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Time (ns)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef gain_msr_phase(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"gain\", \"dimensionless\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gain\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gain\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"gain\", \"dimensionless\")),\n max(data.get_values(\"gain\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n 
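# The dotted "Fit" trace evaluates rabi(x, popt0..popt4) = p0 + p1*sin(2*pi*x*p2 + p3)*exp(-x*p4) (defined in qibocal.fitting.utils) on 20 points spanning the measured gain range; the annotations that follow print the label values read from data_fit.\n    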
fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n # add annotation for label[0] -> pi_pulse_gain\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.1f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gain (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\ndef amplitude_msr_phase(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"amplitude\", \"dimensionless\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"amplitude\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"amplitude\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"amplitude\", \"dimensionless\")),\n max(data.get_values(\"amplitude\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n # add annotation for label[0] -> pi_pulse_gain\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.1f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Amplitude (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\n# For Ramsey oscillations\ndef time_msr(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\"MSR (V)\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"wait\", \"ns\"),\n 
y=data.get_values(\"MSR\", \"uV\"),\n name=\"Ramsey\",\n ),\n row=1,\n col=1,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"wait\", \"ns\")),\n max(data.get_values(\"wait\", \"ns\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=ramsey(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n data_fit.df[\"popt4\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[2]} is {data_fit.df[params[2]][0]:.3f} Hz\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\n# T1\ndef t1_time_msr_phase(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"Time\": \"ns\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"T1\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"T1\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"Time\", \"ns\")),\n max(data.get_values(\"Time\", \"ns\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=exp(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Time (ns)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\n# Flipping\ndef flips_msr_phase(folder, routine, qubit, format):\n try:\n data = Dataset.load_data(folder, routine, format, 
f\"data_q{qubit}\")\n except:\n data = Dataset(quantities={\"flips\": \"dimensionless\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"flips\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Flipping MSR\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"flips\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Flipping Phase\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"flips\", \"dimensionless\")),\n max(data.get_values(\"flips\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=flipping(\n timerange,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.4f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Flips (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Flips (dimensionless)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\n# For calibrate qubit states\ndef exc_gnd(folder, routine, qubit, format):\n\n try:\n data_exc = Dataset.load_data(folder, routine, format, f\"data_exc_q{qubit}\")\n except:\n data_exc = Dataset(quantities={\"iteration\": \"dimensionless\"})\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\"Calibrate qubit states\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_exc.get_values(\"i\", \"V\"),\n y=data_exc.get_values(\"q\", \"V\"),\n name=\"exc_state\",\n mode=\"markers\",\n marker=dict(size=3, color=\"lightcoral\"),\n ),\n row=1,\n col=1,\n )\n\n try:\n data_gnd = Dataset.load_data(folder, routine, format, f\"data_gnd_q{qubit}\")\n except:\n data_gnd = Dataset(quantities={\"iteration\": \"dimensionless\"})\n\n fig.add_trace(\n go.Scatter(\n x=data_gnd.get_values(\"i\", \"V\"),\n y=data_gnd.get_values(\"q\", \"V\"),\n name=\"gnd state\",\n mode=\"markers\",\n marker=dict(size=3, color=\"skyblue\"),\n ),\n row=1,\n col=1,\n )\n\n i_exc = data_exc.get_values(\"i\", \"V\")\n q_exc = data_exc.get_values(\"q\", \"V\")\n\n i_mean_exc = i_exc.mean()\n q_mean_exc = q_exc.mean()\n iq_mean_exc = complex(i_mean_exc, q_mean_exc)\n mod_iq_exc = abs(iq_mean_exc) * 1e6\n\n fig.add_trace(\n go.Scatter(\n x=[i_mean_exc],\n y=[q_mean_exc],\n name=f\" state1_voltage: {mod_iq_exc}
mean_exc_state: {iq_mean_exc}\",\n mode=\"markers\",\n marker=dict(size=10, color=\"red\"),\n ),\n row=1,\n col=1,\n )\n\n i_gnd = data_gnd.get_values(\"i\", \"V\")\n q_gnd = data_gnd.get_values(\"q\", \"V\")\n\n i_mean_gnd = i_gnd.mean()\n q_mean_gnd = q_gnd.mean()\n iq_mean_gnd = complex(i_mean_gnd, q_mean_gnd)\n mod_iq_gnd = abs(iq_mean_gnd) * 1e6\n\n fig.add_trace(\n go.Scatter(\n x=[i_mean_gnd],\n y=[q_mean_gnd],\n name=f\" state0_voltage: {mod_iq_gnd}
mean_gnd_state: {iq_mean_gnd}\",\n mode=\"markers\",\n marker=dict(size=10, color=\"blue\"),\n ),\n row=1,\n col=1,\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"i (V)\",\n yaxis_title=\"q (V)\",\n width=1000,\n )\n\n return fig\n\n\n# allXY\ndef prob_gate(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"}\n )\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(f\"allXY\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gateNumber\", \"dimensionless\"),\n y=data.get_values(\"probability\", \"dimensionless\"),\n mode=\"markers\",\n name=\"Probabilities\",\n ),\n row=1,\n col=1,\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gate sequence number\",\n yaxis_title=\"Z projection probability of qubit state |o>\",\n )\n return fig\n\n\n# allXY\ndef prob_gate_iteration(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n }\n )\n\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(f\"allXY\",),\n )\n\n gates = len(data.get_values(\"gateNumber\", \"dimensionless\"))\n # print(gates)\n import numpy as np\n\n for n in range(gates // 21):\n data_start = n * 21\n data_end = data_start + 21\n beta_param = np.array(data.get_values(\"beta_param\", \"dimensionless\"))[\n data_start\n ]\n gates = np.array(data.get_values(\"gateNumber\", \"dimensionless\"))[\n data_start:data_end\n ]\n probabilities = np.array(data.get_values(\"probability\", \"dimensionless\"))[\n data_start:data_end\n ]\n c = \"#\" + \"{:06x}\".format(n * 823000)\n fig.add_trace(\n go.Scatter(\n x=gates,\n y=probabilities,\n mode=\"markers+lines\",\n line=dict(color=c),\n name=f\"beta_parameter = {beta_param}\",\n marker_size=16,\n ),\n row=1,\n col=1,\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gate sequence number\",\n yaxis_title=\"Z projection probability of qubit state |o>\",\n )\n return fig\n\n\n# beta param tuning\ndef msr_beta(folder, routine, qubit, format):\n\n try:\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Dataset()\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.01,\n vertical_spacing=0.01,\n subplot_titles=(f\"beta_param_tuning\",),\n )\n\n c = \"#6597aa\"\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"beta_param\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n line=dict(color=c),\n mode=\"markers\",\n name=\"[Rx(pi/2) - Ry(pi)] - [Ry(pi) - Rx(pi/2)]\",\n ),\n row=1,\n col=1,\n )\n # add fitting traces\n if len(data) > 0 and len(data_fit) > 0:\n beta_param = np.linspace(\n min(data.get_values(\"beta_param\", 
\"dimensionless\")),\n max(data.get_values(\"beta_param\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=beta_param,\n y=cos(\n beta_param,\n data_fit.df[\"popt0\"][0],\n data_fit.df[\"popt1\"][0],\n data_fit.df[\"popt2\"][0],\n data_fit.df[\"popt3\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.4f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Beta parameter\",\n yaxis_title=\"MSR[uV]\",\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = Dataset.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = Dataset(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = Dataset.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = Dataset(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.df[\"fit_amplitude\"][0],\n data_fit.df[\"fit_center\"][0],\n data_fit.df[\"fit_sigma\"][0],\n data_fit.df[\"fit_offset\"][0],\n ),\n name=\"Fit spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n 
xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # fitting shifted traces\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\n freqrange = np.linspace(\n min(data_shifted.get_values(\"frequency\", \"GHz\")),\n max(data_shifted.get_values(\"frequency\", \"GHz\")),\n 20,\n )\n params = [i for i in list(data_fit_shifted.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit_shifted.df[\"fit_amplitude\"][0],\n data_fit_shifted.df[\"fit_center\"][0],\n data_fit_shifted.df[\"fit_sigma\"][0],\n data_fit_shifted.df[\"fit_offset\"][0],\n ),\n name=\"Fit shifted spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated shifted {params[0]} is {data_fit_shifted.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\nsrc/qibocal/plots/heatmaps.py METASEP\n# -*- coding: utf-8 -*-\nimport os.path\n\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Dataset\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is 
hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\n\ndef duration_gain_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"gain\", \"dimensionless\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"gain\", \"dimensionless\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"duration (ns)\",\n yaxis_title=\"gain (dimensionless)\",\n xaxis2_title=\"duration (ns)\",\n yaxis2_title=\"gain (dimensionless)\",\n )\n return fig\n\n\ndef duration_amplitude_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"amplitude\", \"dimensionless\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"amplitude\", \"dimensionless\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"duration (ns)\",\n yaxis_title=\"amplitude (dimensionless)\",\n xaxis2_title=\"duration (ns)\",\n yaxis2_title=\"amplitude (dimensionless)\",\n )\n return fig\n\nsrc/qibocal/plots/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qibocal.plots.heatmaps import *\nfrom qibocal.plots.scatters import *\n\nsrc/qibocal/fitting/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport re\n\nimport numpy as np\n\n\ndef lorenzian(frequency, amplitude, center, sigma, offset):\n # http://openafox.com/science/peak-function-derivations.html\n return (amplitude / np.pi) * (\n sigma / ((frequency - center) ** 2 + sigma**2)\n ) + 
offset\n\n\ndef rabi(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # Period T : 1/p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef ramsey(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # DeltaFreq : p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef exp(x, *p):\n return p[0] - p[1] * np.exp(-1 * x * p[2])\n\n\ndef flipping(x, p0, p1, p2, p3):\n # A fit to Flipping Qubit oscillation\n # Epsilon?? shoule be Amplitude : p[0]\n # Offset : p[1]\n # Period of oscillation : p[2]\n # phase for the first point corresponding to pi/2 rotation : p[3]\n return np.sin(x * 2 * np.pi / p2 + p3) * p0 + p1\n\n\ndef cos(x, p0, p1, p2, p3):\n # Offset : p[0]\n # Amplitude : p[1]\n # Period : p[2]\n # Phase : p[3]\n return p0 + p1 * np.cos(2 * np.pi * x / p2 + p3)\n\n\ndef parse(key):\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n return name, unit\n\nsrc/qibocal/fitting/methods.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = 
model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n 
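# Initial guesses for exp(x, *p) = p[0] - p[1]*exp(-x*p[2]) from qibocal.fitting.utils: offset, amplitude and decay rate; the 1/250 rate guess corresponds to a starting T1 estimate of ~250 in the units of the time axis, and T1 is recovered below as abs(1/popt[2]).\n    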
if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\n\ndef drag_tunning_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n ],\n )\n\n beta_params = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n 0, # Offset: p[0]\n beta_params.values[np.argmax(voltages)]\n - beta_params.values[np.argmin(voltages)], # Amplitude: p[1]\n 4, # Period: p[2]\n 0.3, # Phase: p[3]\n ]\n\n try:\n popt, pcov = curve_fit(cos, beta_params.values, voltages.values)\n smooth_dataset = cos(beta_params.values, popt[0], popt[1], popt[2], popt[3])\n beta_optimal = beta_params.values[np.argmin(smooth_dataset)]\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: beta_optimal,\n }\n )\n return data_fit\n\nsrc/qibocal/fitting/__init__.py METASEP\n\nsrc/qibocal/cli/builders.py METASEP\n# -*- coding: utf-8 -*-\nimport datetime\nimport inspect\nimport os\nimport shutil\n\nimport yaml\n\nfrom qibocal import calibrations\nfrom qibocal.config import log, raise_error\nfrom qibocal.data import Data\n\n\ndef load_yaml(path):\n \"\"\"Load yaml file from disk.\"\"\"\n with open(path, \"r\") as file:\n data = yaml.safe_load(file)\n return data\n\n\nclass ActionBuilder:\n \"\"\"Class for parsing and executing runcards.\n Args:\n runcard (path): path containing the runcard.\n folder (path): path for the output folder.\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n\n def __init__(self, runcard, folder=None, force=False):\n path, self.folder = self._generate_output_folder(folder, force)\n self.runcard = load_yaml(runcard)\n # Qibolab default backend if not provided in 
runcard.\n backend_name = self.runcard.get(\"backend\", \"qibolab\")\n platform_name = self.runcard.get(\"platform\", \"dummy\")\n self.backend, self.platform = self._allocate_backend(\n backend_name, platform_name\n )\n self.qubits = self.runcard[\"qubits\"]\n self.format = self.runcard[\"format\"]\n\n # Saving runcard\n self.save_runcards(path, runcard, platform_name)\n self.save_meta(path, self.folder)\n\n @staticmethod\n def _generate_output_folder(folder, force):\n \"\"\"Static method for generating the output folder.\n Args:\n folder (path): path for the output folder. If None it will be created a folder automatically\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n if folder is None:\n import getpass\n\n e = datetime.datetime.now()\n user = getpass.getuser().replace(\".\", \"-\")\n date = e.strftime(\"%Y-%m-%d\")\n folder = f\"{date}-{'000'}-{user}\"\n num = 0\n while os.path.exists(folder):\n log.warning(f\"Directory {folder} already exists.\")\n num += 1\n folder = f\"{date}-{str(num).rjust(3, '0')}-{user}\"\n log.warning(f\"Trying to create directory {folder}\")\n elif os.path.exists(folder) and not force:\n raise_error(RuntimeError, f\"Directory {folder} already exists.\")\n elif os.path.exists(folder) and force:\n log.warning(f\"Deleting previous directory {folder}.\")\n shutil.rmtree(os.path.join(os.getcwd(), folder))\n\n path = os.path.join(os.getcwd(), folder)\n log.info(f\"Creating directory {folder}.\")\n os.makedirs(path)\n return path, folder\n\n def _allocate_backend(self, backend_name, platform_name):\n \"\"\"Allocate the platform using Qibolab.\"\"\"\n from qibo.backends import GlobalBackend, set_backend\n from qibolab.platform import Platform\n from qibolab.platforms.abstract import AbstractPlatform\n\n set_backend(backend=backend_name, platform=platform_name)\n backend = GlobalBackend()\n\n if backend_name == \"qibolab\":\n platform = backend.platform\n else:\n platform = None\n\n return backend, platform\n\n def save_runcards(self, path, runcard, platform_name):\n \"\"\"Save the output runcards.\"\"\"\n shutil.copy(runcard, f\"{path}/runcard.yml\")\n if self.platform is not None:\n from qibolab.paths import qibolab_folder\n\n platform_runcard = qibolab_folder / \"runcards\" / f\"{platform_name}.yml\"\n shutil.copy(platform_runcard, f\"{path}/platform.yml\")\n\n def save_meta(self, path, folder):\n import qibocal\n\n e = datetime.datetime.now(datetime.timezone.utc)\n meta = {}\n meta[\"title\"] = folder\n meta[\"backend\"] = str(self.backend)\n meta[\"platform\"] = str(self.backend.platform)\n meta[\"date\"] = e.strftime(\"%Y-%m-%d\")\n meta[\"start-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"versions\"] = self.backend.versions\n meta[\"versions\"][\"qibocal\"] = qibocal.__version__\n\n with open(f\"{path}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n def _build_single_action(self, name):\n \"\"\"Helper method to parse the actions in the runcard.\"\"\"\n f = getattr(calibrations, name)\n path = os.path.join(self.folder, f\"data/{name}/\")\n os.makedirs(path)\n sig = inspect.signature(f)\n params = self.runcard[\"actions\"][name]\n for param in list(sig.parameters)[2:-1]:\n if param not in params:\n raise_error(AttributeError, f\"Missing parameter {param} in runcard.\")\n if f.__annotations__[\"qubit\"] == int:\n single_qubit_action = True\n else:\n single_qubit_action = False\n\n return f, params, path, single_qubit_action\n\n def execute(self):\n \"\"\"Method to execute 
sequentially all the actions in the runcard.\"\"\"\n if self.platform is not None:\n self.platform.connect()\n self.platform.setup()\n self.platform.start()\n\n for action in self.runcard[\"actions\"]:\n routine, args, path, single_qubit_action = self._build_single_action(action)\n self._execute_single_action(routine, args, path, single_qubit_action)\n\n if self.platform is not None:\n self.platform.stop()\n self.platform.disconnect()\n\n def _execute_single_action(self, routine, arguments, path, single_qubit):\n \"\"\"Method to execute a single action and retrieving the results.\"\"\"\n if self.format is None:\n raise_error(ValueError, f\"Cannot store data using {self.format} format.\")\n if single_qubit:\n for qubit in self.qubits:\n results = routine(self.platform, qubit, **arguments)\n\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n if self.platform is not None:\n self.update_platform_runcard(qubit, routine.__name__)\n else:\n results = routine(self.platform, self.qubits, **arguments)\n\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n if self.platform is not None:\n self.update_platform_runcard(qubit, routine.__name__)\n\n def update_platform_runcard(self, qubit, routine):\n\n try:\n data_fit = Data.load_data(\n self.folder, routine, self.format, f\"fit_q{qubit}\"\n )\n except:\n data_fit = Data()\n\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n settings = load_yaml(f\"{self.folder}/platform.yml\")\n\n for param in params:\n settings[\"characterization\"][\"single_qubit\"][qubit][param] = int(\n data_fit.df[param][0]\n )\n\n with open(f\"{self.folder}/data/{routine}/platform.yml\", \"a+\") as file:\n yaml.dump(\n settings, file, sort_keys=False, indent=4, default_flow_style=None\n )\n\n def dump_report(self):\n from qibocal.web.report import create_report\n\n # update end time\n meta = load_yaml(f\"{self.folder}/meta.yml\")\n e = datetime.datetime.now(datetime.timezone.utc)\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n with open(f\"{self.folder}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n create_report(self.folder)\n\n\nclass ReportBuilder:\n \"\"\"Parses routines and plots to report and live plotting page.\n\n Args:\n path (str): Path to the data folder to generate report for.\n \"\"\"\n\n def __init__(self, path):\n self.path = path\n self.metadata = load_yaml(os.path.join(path, \"meta.yml\"))\n\n # find proper path title\n base, self.title = os.path.join(os.getcwd(), path), \"\"\n while self.title in (\"\", \".\"):\n base, self.title = os.path.split(base)\n\n self.runcard = load_yaml(os.path.join(path, \"runcard.yml\"))\n self.format = self.runcard.get(\"format\")\n self.qubits = self.runcard.get(\"qubits\")\n\n # create calibration routine objects\n # (could be incorporated to :meth:`qibocal.cli.builders.ActionBuilder._build_single_action`)\n self.routines = []\n for action in self.runcard.get(\"actions\"):\n if hasattr(calibrations, action):\n routine = getattr(calibrations, action)\n else:\n raise_error(ValueError, f\"Undefined action {action} in report.\")\n\n if not hasattr(routine, \"plots\"):\n routine.plots = []\n self.routines.append(routine)\n\n def get_routine_name(self, routine):\n \"\"\"Prettify routine's name for report headers.\"\"\"\n return routine.__name__.replace(\"_\", \" \").title()\n\n def get_figure(self, routine, method, qubit):\n \"\"\"Get html figure for report.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit 
id.\n \"\"\"\n import tempfile\n\n figure = method(self.path, routine.__name__, qubit, self.format)\n with tempfile.NamedTemporaryFile() as temp:\n figure.write_html(temp.name, include_plotlyjs=False, full_html=False)\n fightml = temp.read().decode(\"utf-8\")\n return fightml\n\n def get_live_figure(self, routine, method, qubit):\n \"\"\"Get url to dash page for live plotting.\n\n This url is used by :meth:`qibocal.web.app.get_graph`.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n return os.path.join(\n method.__name__,\n self.path,\n routine.__name__,\n str(qubit),\n self.format,\n )\n\nsrc/qibocal/cli/_base.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Adds global CLI options.\"\"\"\nimport base64\nimport pathlib\nimport shutil\nimport socket\nimport subprocess\nimport uuid\nfrom urllib.parse import urljoin\n\nimport click\nfrom qibo.config import log, raise_error\n\nfrom qibocal.cli.builders import ActionBuilder\n\nCONTEXT_SETTINGS = dict(help_option_names=[\"-h\", \"--help\"])\n\n# options for report upload\nUPLOAD_HOST = (\n \"qcvv@localhost\"\n if socket.gethostname() == \"saadiyat\"\n else \"qcvv@login.qrccluster.com\"\n)\nTARGET_DIR = \"qcvv-reports/\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"runcard\", metavar=\"RUNCARD\", type=click.Path(exists=True))\n@click.option(\n \"folder\",\n \"-o\",\n type=click.Path(),\n help=\"Output folder. If not provided a standard name will generated.\",\n)\n@click.option(\n \"force\",\n \"-f\",\n is_flag=True,\n help=\"Use --force option to overwrite the output folder.\",\n)\ndef command(runcard, folder, force=None):\n\n \"\"\"qibocal: Quantum Calibration Verification and Validation using Qibo.\n\n Arguments:\n\n - RUNCARD: runcard with declarative inputs.\n \"\"\"\n\n builder = ActionBuilder(runcard, folder, force)\n builder.execute()\n builder.dump_report()\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.option(\n \"port\",\n \"-p\",\n \"--port\",\n default=8050,\n type=int,\n help=\"Localhost port to launch dash server.\",\n)\n@click.option(\n \"debug\",\n \"-d\",\n \"--debug\",\n is_flag=True,\n help=\"Launch server in debugging mode.\",\n)\ndef live_plot(port, debug):\n \"\"\"Real time plotting of calibration data on a dash server.\"\"\"\n import socket\n\n from qibocal.web.app import app\n\n # change port if it is already used\n while True:\n with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n if s.connect_ex((\"localhost\", port)) != 0:\n break\n port += 1\n\n app.run_server(debug=debug, port=port)\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"output_folder\", metavar=\"FOLDER\", type=click.Path(exists=True))\ndef upload(output_folder):\n \"\"\"Uploads output folder to server\"\"\"\n\n output_path = pathlib.Path(output_folder)\n\n # check the rsync command exists.\n if not shutil.which(\"rsync\"):\n raise_error(\n RuntimeError,\n \"Could not find the rsync command. 
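# Illustrative sketch (added for clarity; not part of the qibocal sources above).
# The port-probing loop in live_plot relies on connect_ex() returning 0 only
# when something is already listening on localhost:port; otherwise the port is
# treated as free. The same check as a standalone helper:
import socket


def first_free_port(start=8050):
    port = start
    while True:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            if s.connect_ex(("localhost", port)) != 0:
                return port
        port += 1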
Please make sure it is installed.\",\n )\n\n # check that we can authentica with a certificate\n ssh_command_line = (\n \"ssh\",\n \"-o\",\n \"PreferredAuthentications=publickey\",\n \"-q\",\n UPLOAD_HOST,\n \"exit\",\n )\n\n str_line = \" \".join(repr(ele) for ele in ssh_command_line)\n\n log.info(f\"Checking SSH connection to {UPLOAD_HOST}.\")\n\n try:\n subprocess.run(ssh_command_line, check=True)\n except subprocess.CalledProcessError as e:\n raise RuntimeError(\n (\n \"Could not validate the SSH key. \"\n \"The command\\n%s\\nreturned a non zero exit status. \"\n \"Please make sure that your public SSH key is on the server.\"\n )\n % str_line\n ) from e\n except OSError as e:\n raise RuntimeError(\n \"Could not run the command\\n{}\\n: {}\".format(str_line, e)\n ) from e\n\n log.info(\"Connection seems OK.\")\n\n # upload output\n randname = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode()\n newdir = TARGET_DIR + randname\n\n rsync_command = (\n \"rsync\",\n \"-aLz\",\n \"--chmod=ug=rwx,o=rx\",\n f\"{output_path}/\",\n f\"{UPLOAD_HOST}:{newdir}\",\n )\n\n log.info(f\"Uploading output ({output_path}) to {UPLOAD_HOST}\")\n try:\n subprocess.run(rsync_command, check=True)\n except subprocess.CalledProcessError as e:\n msg = f\"Failed to upload output: {e}\"\n raise RuntimeError(msg) from e\n\n url = urljoin(ROOT_URL, randname)\n log.info(f\"Upload completed. The result is available at:\\n{url}\")\n\nsrc/qibocal/cli/__init__.py METASEP\n# -*- coding: utf-8 -*-\n\n\"\"\"CLI entry point.\"\"\"\nfrom ._base import command, live_plot, upload\n\nsrc/qibocal/calibrations/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qibocal.calibrations.characterization.allXY import *\nfrom qibocal.calibrations.characterization.calibrate_qubit_states import *\nfrom qibocal.calibrations.characterization.flipping import *\nfrom qibocal.calibrations.characterization.qubit_spectroscopy import *\nfrom qibocal.calibrations.characterization.rabi_oscillations import *\nfrom qibocal.calibrations.characterization.ramsey import *\nfrom qibocal.calibrations.characterization.resonator_spectroscopy import *\nfrom qibocal.calibrations.characterization.t1 import *\nfrom qibocal.calibrations.protocols.test import *\n\nserverscripts/qcvv-update-on-change.py METASEP\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport argparse\n\nimport curio\nimport inotify.adapters\nimport inotify.constants\nfrom curio import subprocess\n\n\nasync def main(folder, exe_args):\n i = inotify.adapters.Inotify()\n i.add_watch(folder)\n\n for event in i.event_gen(yield_nones=False):\n if event is not None:\n (header, _, _, _) = event\n if (\n (header.mask & inotify.constants.IN_CREATE)\n or (header.mask & inotify.constants.IN_DELETE)\n or (header.mask & inotify.constants.IN_MODIFY)\n ):\n await subprocess.run(exe_args)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"folder\")\n parser.add_argument(\"exe_args\", nargs=\"+\")\n args = parser.parse_args()\n curio.run(main(args.folder, args.exe_args))\n\nserverscripts/qcvv-index.reports.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"qcvv-index-reports.py\nGenerates a JSON index with reports information.\n\"\"\"\nimport json\nimport pathlib\nimport sys\nfrom collections import ChainMap\n\nimport yaml\n\nROOT = \"/home/users/qcvv/qcvv-reports\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\nOUT = \"/home/users/qcvv/qcvv-reports/index.json\"\nDEFAULTS = {\n \"title\": \"-\",\n \"date\": \"-\",\n \"platform\": \"-\",\n \"start-time\": \"-\",\n 
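# Illustrative sketch (added for clarity; not part of the qibocal sources above).
# The upload command publishes each report under a random URL-safe directory
# name derived from a UUID and joins it onto ROOT_URL; the host below is the
# one hard-coded above, while the token itself differs on every run:
import base64
import uuid
from urllib.parse import urljoin

randname = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode()
print(urljoin("http://login.qrccluster.com:9000/", randname))
# -> e.g. http://login.qrccluster.com:9000/<24-character token>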
\"end-time\": \"-\",\n}\nREQUIRED_FILE_METADATA = {\"title\", \"date\", \"platform\", \"start-time\" \"end-time\"}\n\n\ndef meta_from_path(p):\n meta = ChainMap(DEFAULTS)\n yaml_meta = p / \"meta.yml\"\n yaml_res = {}\n if yaml_meta.exists():\n with yaml_meta.open() as f:\n try:\n yaml_res = yaml.safe_load(f)\n except yaml.YAMLError as e:\n print(f\"Error processing {yaml_meta}: {e}\", file=sys.stderr)\n meta = meta.new_child(yaml_res)\n return meta\n\n\ndef register(p):\n path_meta = meta_from_path(p)\n title, date, platform, start_time, end_time = (\n path_meta[\"title\"],\n path_meta[\"date\"],\n path_meta[\"platform\"],\n path_meta[\"start-time\"],\n path_meta[\"end-time\"],\n )\n url = ROOT_URL + p.name\n titlelink = f'{title}'\n return (titlelink, date, platform, start_time, end_time)\n\n\ndef make_index():\n root_path = pathlib.Path(ROOT)\n data = []\n for p in root_path.iterdir():\n if p.is_dir():\n try:\n res = register(p)\n data.append(res)\n except:\n print(\"Error processing folder\", p, file=sys.stderr)\n raise\n\n with open(OUT, \"w\") as f:\n json.dump({\"data\": data}, f)\n\n\nif __name__ == \"__main__\":\n make_index()\n\nsrc/qibocal/decorators.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Decorators implementation.\"\"\"\nimport os\n\nfrom qibocal.config import raise_error\n\n\ndef plot(header, method):\n \"\"\"Decorator for adding plots in the report and live plotting page.\n\n Args:\n header (str): Header of the plot to use in the report.\n method (Callable): Plotting method defined under ``qibocal.plots``.\n \"\"\"\n\n def wrapped(f):\n if hasattr(f, \"plots\"):\n # insert in the beginning of the list to have\n # proper plot ordering in the report\n f.plots.insert(0, (header, method))\n else:\n f.plots = [(header, method)]\n return f\n\n return wrapped\n\nsrc/qibocal/data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Implementation of Dataset class to store measurements.\"\"\"\n\nimport re\nfrom abc import abstractmethod\n\nimport pandas as pd\nimport pint_pandas\n\nfrom qibocal.config import raise_error\n\n\nclass AbstractDataset:\n def __init__(self, name=None):\n\n if name is None:\n self.name = \"data\"\n else:\n self.name = name\n\n self.df = pd.DataFrame()\n\n def __add__(self, data):\n self.df = pd.concat([self.df, data.df], ignore_index=True)\n return self\n\n @abstractmethod\n def add(self, data):\n raise_error(NotImplementedError)\n\n def __len__(self):\n \"\"\"Computes the length of the dataset.\"\"\"\n return len(self.df)\n\n @abstractmethod\n def load_data(cls, folder, routine, format, name):\n raise_error(NotImplementedError)\n\n @abstractmethod\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n if self.quantities == None:\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n else:\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\n\nclass Dataset(AbstractDataset):\n \"\"\"Class to store the data measured during the calibration routines.\n It is a wrapper to a pandas DataFrame with units of measure from the Pint\n library.\n\n Args:\n quantities (dict): dictionary containing additional quantities that the user\n may save other than the pulse sequence output. 
The keys are the name of the\n quantities and the corresponding values are the units of measure.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n self.df = pd.DataFrame(\n {\n \"MSR\": pd.Series(dtype=\"pint[V]\"),\n \"i\": pd.Series(dtype=\"pint[V]\"),\n \"q\": pd.Series(dtype=\"pint[V]\"),\n \"phase\": pd.Series(dtype=\"pint[deg]\"),\n }\n )\n self.quantities = {\"MSR\": \"V\", \"i\": \"V\", \"q\": \"V\", \"phase\": \"deg\"}\n\n if quantities is not None:\n self.quantities.update(quantities)\n for name, unit in quantities.items():\n self.df.insert(0, name, pd.Series(dtype=f\"pint[{unit}]\"))\n\n from pint import UnitRegistry\n\n self.ureg = UnitRegistry()\n\n def add(self, data):\n \"\"\"Add a row to dataset.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n # TODO: find a better way to do this\n self.df.loc[l, name] = value * self.ureg(unit)\n\n def get_values(self, quantity, unit):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n unit (str): Unit of the returned values.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity].pint.to(unit).pint.magnitude\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. Possible choices are 'csv' and 'pickle'.\n\n Returns:\n dataset (``Dataset``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file, header=[0, 1])\n obj.df = obj.df.pint.quantify(level=-1)\n obj.df.pop(\"Unnamed: 0_level_0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n\nclass Data(AbstractDataset):\n \"\"\"Class to store the data obtained from calibration routines.\n It is a wrapper to a pandas DataFrame.\n\n Args:\n quantities (dict): dictionary quantities to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n if quantities is not None:\n self.quantities = quantities\n for name in quantities:\n self.df.insert(0, name, pd.Series(dtype=object))\n\n def add(self, data):\n \"\"\"Add a row to dataset.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n self.df.loc[l, key] = value\n\n def get_values(self, quantity):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity]\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n 
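# Usage sketch (added for clarity; not part of the qibocal sources above).
# Dataset.add expects keys of the form "<name>[<unit>]" (the unit is parsed out
# of the square brackets) and get_values converts on read. Assumes qibocal,
# pandas and pint-pandas are installed; the numbers are invented:
from qibocal.data import Dataset

data = Dataset(name="fast_sweep", quantities={"frequency": "Hz"})
data.add(
    {
        "MSR[V]": 1.2e-6,
        "i[V]": 1.0e-6,
        "q[V]": 0.5e-6,
        "phase[deg]": 33.0,
        "frequency[Hz]": 7.1e9,
    }
)
print(len(data))                            # 1 row so far
print(data.get_values("frequency", "GHz"))  # same column expressed in GHz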
Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. Possible choices are 'csv' and 'pickle'.\n\n Returns:\n dataset (``Dataset``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file)\n obj.df.pop(\"Unnamed: 0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\nsrc/qibocal/config.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Custom logger implemenation.\"\"\"\nimport logging\nimport os\n\n# Logging level from 0 (all) to 4 (errors) (see https://docs.python.org/3/library/logging.html#logging-levels)\nQIBOCAL_LOG_LEVEL = 1\nif \"QIBOCAL_LOG_LEVEL\" in os.environ: # pragma: no cover\n QIBOCAL_LOG_LEVEL = 10 * int(os.environ.get(\"QIBOCAL_LOG_LEVEL\"))\n\n\ndef raise_error(exception, message=None, args=None):\n \"\"\"Raise exception with logging error.\n\n Args:\n exception (Exception): python exception.\n message (str): the error message.\n \"\"\"\n log.error(message)\n if args:\n raise exception(message, args)\n else:\n raise exception(message)\n\n\n# Configuration for logging mechanism\nclass CustomHandler(logging.StreamHandler):\n \"\"\"Custom handler for logging algorithm.\"\"\"\n\n def format(self, record):\n \"\"\"Format the record with specific format.\"\"\"\n from qibocal import __version__\n\n fmt = f\"[Qibocal {__version__}|%(levelname)s|%(asctime)s]: %(message)s\"\n return logging.Formatter(fmt, datefmt=\"%Y-%m-%d %H:%M:%S\").format(record)\n\n\n# allocate logger object\nlog = logging.getLogger(__name__)\nlog.setLevel(QIBOCAL_LOG_LEVEL)\nlog.addHandler(CustomHandler())\n\nsrc/qibocal/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom .cli import command, live_plot, upload\n\n\"\"\"qibocal: Quantum Calibration Verification and Validation using Qibo.\"\"\"\nimport importlib.metadata as im\n\n__version__ = im.version(__package__)\n\nsrc/qcvv/fitting/methods.py METASEP\n"},"file_context":{"kind":"list like","value":[{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting 
equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, 
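# Explanatory sketch (added for clarity; not part of the qibocal sources above).
# The post-processing in lorentzian_fit (peak_voltage = amplitude / (sigma * pi)
# + offset, Q = f0 / (2 * sigma)) is consistent with an area-normalised
# Lorentzian. The actual `lorenzian` lives in qibocal.fitting.utils; the
# definition and numbers below only illustrate that functional form:
import numpy as np


def lorentzian(frequency, amplitude, center, sigma, offset):
    return amplitude / np.pi * sigma / ((frequency - center) ** 2 + sigma**2) + offset


center, sigma, amplitude, offset = 7.1e3, 0.5, -2.0, 0.05     # toy values
print(lorentzian(center, amplitude, center, sigma, offset))   # depth = A / (pi * sigma) + offset
print(center / (2 * sigma))                                   # quality factor Q = f0 / BW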
offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < 
np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n 
data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n 
return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 
1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n 
return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n 
model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n 
quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. N flips or i?","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not 
successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n 
quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": 
popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. 


def flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):

    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            labels[0],
            labels[1],
        ],
    )

    # Check what the X data stores: the number of flips or the iteration index?
    flips = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    if nqubits == 1:
        pguess = [0.0003, np.mean(voltages), -18, 0]  # epsilon guess parameter
    else:
        pguess = [0.0003, np.mean(voltages), 18, 0]  # epsilon guess parameter

    try:
        popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)
        epsilon = -np.pi / popt[2]
        amplitude_delta = np.pi / (np.pi + epsilon)
        corrected_amplitude = amplitude_delta * pi_pulse_amplitude
        # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)
        # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            "popt3": popt[3],
            labels[0]: amplitude_delta,
            labels[1]: corrected_amplitude,
        }
    )
    return data_fit


def drag_tunning_fit(data, x, y, qubit, nqubits, labels):

    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            labels[0],
        ],
    )

    beta_params = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    pguess = [
        0,  # Offset: p[0]
        beta_params.values[np.argmax(voltages)]
        - beta_params.values[np.argmin(voltages)],  # Amplitude: p[1]
        4,  # Period: p[2]
        0.3,  # Phase: p[3]
    ]

    try:
        popt, pcov = curve_fit(cos, beta_params.values, voltages.values, p0=pguess)
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            "popt3": popt[3],
            # labels[0] (the optimal beta) would be derived from the fitted
            # cosine; that derivation is not included in this excerpt.
        }
    )
    return data_fit
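
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): a plausible form of
# the `cos` helper imported from qibocal.fitting.utils, based on the guess
# comments in drag_tunning_fit above (Offset: p[0], Amplitude: p[1],
# Period: p[2], Phase: p[3]).  The real helper may use a different
# parameterisation; the underscore name avoids shadowing the import.
def _cosine_sketch(x, offset, amplitude, period, phase):
    # Offset cosine used for the DRAG beta-parameter scan.
    return offset + amplitude * np.cos(2 * np.pi * x / period + phase)
# ---------------------------------------------------------------------------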
guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess 
parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n 
\"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n 
return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. 
N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\n\ndef drag_tunning_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n ],\n )\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n 
log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - 
min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\n\ndef drag_tunning_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n ],\n )\n\n beta_params = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n 0, # Offset: p[0]\n beta_params.values[np.argmax(voltages)]\n - beta_params.values[np.argmin(voltages)], # Amplitude: p[1]\n 4, # Period: p[2]\n 0.3, # Phase: p[3]\n ]\n\n try:","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = 
(np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, 
maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\n\ndef drag_tunning_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n ],\n )\n\n beta_params = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n 0, # Offset: p[0]\n beta_params.values[np.argmax(voltages)]\n - beta_params.values[np.argmin(voltages)], # Amplitude: p[1]\n 4, # Period: p[2]\n 0.3, # Phase: p[3]\n ]\n\n try:\n popt, pcov = curve_fit(cos, beta_params.values, voltages.values)","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n 
data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n 
rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = 
abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = 
curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = 
model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - 
min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = 


def drag_tunning_fit(data, x, y, qubit, nqubits, labels):

    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            labels[0],
        ],
    )

    beta_params = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    pguess = [
        0,  # Offset: p[0]
        beta_params.values[np.argmax(voltages)]
        - beta_params.values[np.argmin(voltages)],  # Amplitude: p[1]
        4,  # Period: p[2]
        0.3,  # Phase: p[3]
    ]

    try:
        popt, pcov = curve_fit(cos, beta_params.values, voltages.values, p0=pguess)
        smooth_dataset = cos(beta_params.values, popt[0], popt[1], popt[2], popt[3])
        beta_optimal = beta_params.values[np.argmin(smooth_dataset)]

    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            "popt3": popt[3],
            labels[0]: beta_optimal,
        }
    )
    return data_fit
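

# NOTE: a minimal sketch (assumption) of a cosine model matching the parameter layout
# documented in drag_tunning_fit's pguess above: offset p[0], amplitude p[1], period p[2],
# phase p[3]. The real `cos` used by the routine is imported from qibocal.fitting.utils.
def cos_model(x, offset, amplitude, period, phase):
    # beta_optimal above is taken at the minimum of this oscillation over the beta scan.
    return offset + amplitude * np.cos(2 * np.pi * x / period + phase)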
name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n","type":"random"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n 
log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\n t1 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\n labels[2]: t1,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",","type":"random"},{"content":"# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - 
np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]","type":"random"}],"string":"[\n {\n \"content\": \"# -*- coding: utf-8 -*-\\n\\\"\\\"\\\"Routine-specific method for post-processing data acquired.\\\"\\\"\\\"\\nimport lmfit\\nimport numpy as np\\nfrom scipy.optimize import curve_fit\\n\\nfrom qibocal.config import log\\nfrom qibocal.data import Data\\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\\n\\n\\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\\n \\\"\\\"\\\"Fitting routine for resonator spectroscopy\\\"\\\"\\\"\\n if fit_file_name == None:\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n else:\\n data_fit = Data(\\n name=fit_file_name + f\\\"_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n\\n frequencies = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n # Create a lmfit model for 
fitting equation defined in resonator_peak\\n model_Q = lmfit.Model(lorenzian)\\n\\n # Guess parameters for Lorentzian max or min\\n if (nqubits == 1 and labels[0] == \\\"resonator_freq\\\") or (\\n nqubits != 1 and labels[0] == \\\"qubit_freq\\\"\\n ):\\n guess_center = frequencies[\\n np.argmax(voltages)\\n ] # Argmax = Returns the indices of the maximum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\\n )\\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\\n\\n else:\\n guess_center = frequencies[\\n np.argmin(voltages)\\n ] # Argmin = Returns the indices of the minimum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\\n )\\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\\n\\n # Add guessed parameters to the model\\n model_Q.set_param_hint(\\\"center\\\", value=guess_center, vary=True)\\n model_Q.set_param_hint(\\\"sigma\\\", value=guess_sigma, vary=True)\\n model_Q.set_param_hint(\\\"amplitude\\\", value=guess_amp, vary=True)\\n model_Q.set_param_hint(\\\"offset\\\", value=guess_offset, vary=True)\\n guess_parameters = model_Q.make_params()\\n\\n # fit the model with the data and guessed parameters\\n try:\\n fit_res = model_Q.fit(\\n data=voltages, frequency=frequencies, params=guess_parameters\\n )\\n except:\\n log.warning(\\\"The fitting was not successful\\\")\\n return data_fit\\n\\n # get the values for postprocessing and for legend.\\n f0 = fit_res.best_values[\\\"center\\\"]\\n BW = fit_res.best_values[\\\"sigma\\\"] * 2\\n Q = abs(f0 / BW)\\n peak_voltage = (\\n fit_res.best_values[\\\"amplitude\\\"] / (fit_res.best_values[\\\"sigma\\\"] * np.pi)\\n + fit_res.best_values[\\\"offset\\\"]\\n )\\n\\n freq = f0 * 1e6\\n\\n data_fit.add(\\n {\\n labels[1]: peak_voltage,\\n labels[0]: freq,\\n \\\"fit_amplitude\\\": fit_res.best_values[\\\"amplitude\\\"],\\n \\\"fit_center\\\": fit_res.best_values[\\\"center\\\"],\\n \\\"fit_sigma\\\": fit_res.best_values[\\\"sigma\\\"],\\n \\\"fit_offset\\\": fit_res.best_values[\\\"offset\\\"],\\n }\\n )\\n return data_fit\\n\\n\\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"popt4\\\",\\n labels[0],\\n labels[1],\\n labels[2],\\n ],\\n )\\n\\n time = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n if nqubits == 1:\\n pguess = [\\n np.mean(voltages.values),\\n np.max(voltages.values) - np.min(voltages.values),\\n 0.5 / time.values[np.argmin(voltages.values)],\\n np.pi / 2,\\n 0.1e-6,\\n ]\\n else:\\n pguess = [\\n np.mean(voltages.values),\\n np.max(voltages.values) - np.min(voltages.values),\\n 0.5 / time.values[np.argmax(voltages.values)],\\n np.pi / 2,\\n 0.1e-6,\\n ]\\n try:\\n popt, pcov = curve_fit(\\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\\n )\\n smooth_dataset = rabi(time.values, *popt)\\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\\n t1 = 1.0 / popt[4] # double check T1\\n except:\\n log.warning(\\\"The fitting was not succesful\\\")\\n return data_fit\\n\\n data_fit.add(\\n {\\n \\\"popt0\\\": popt[0],\\n \\\"popt1\\\": popt[1],\\n \\\"popt2\\\": 
popt[2],\\n \\\"popt3\\\": popt[3],\\n \\\"popt4\\\": popt[4],\\n labels[0]: pi_pulse_duration,\\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\\n labels[2]: t1,\\n }\\n )\\n return data_fit\\n\\n\\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\\n\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"popt4\\\",\\n labels[0],\\n labels[1],\\n labels[2],\\n ],\\n )\\n\\n time = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n pguess = [\\n np.mean(voltages.values),\\n np.max(voltages.values) - np.min(voltages.values),\\n 0.5 / time.values[np.argmin(voltages.values)],\\n np.pi / 2,\\n 500e-9,\\n ]\\n\\n try:\\n popt, pcov = curve_fit(\\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\\n )\\n delta_fitting = popt[2]\\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\\n t2 = 1.0 / popt[4]\\n except:\\n log.warning(\\\"The fitting was not succesful\\\")\\n return data_fit\\n\\n data_fit.add(\\n {\\n \\\"popt0\\\": popt[0],\\n \\\"popt1\\\": popt[1],\\n \\\"popt2\\\": popt[2],\\n \\\"popt3\\\": popt[3],\\n \\\"popt4\\\": popt[4],\\n labels[0]: delta_phys,\\n labels[1]: corrected_qubit_frequency,\\n labels[2]: t2,\\n }\\n )\\n return data_fit\\n\\n\\ndef t1_fit(data, x, y, qubit, nqubits, labels):\\n\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n labels[0],\\n ],\\n )\\n\\n time = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n if nqubits == 1:\\n pguess = [\\n max(voltages.values),\\n (max(voltages.values) - min(voltages.values)),\\n 1 / 250,\\n ]\\n else:\\n pguess = [\\n min(voltages.values),\\n (max(voltages.values) - min(voltages.values)),\\n 1 / 250,\\n ]\\n\\n try:\\n popt, pcov = curve_fit(\\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\\n )\\n t1 = abs(1 / popt[2])\\n\\n except:\\n log.warning(\\\"The fitting was not succesful\\\")\\n return data_fit\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\n\\\"\\\"\\\"Routine-specific method for post-processing data acquired.\\\"\\\"\\\"\\nimport lmfit\\nimport numpy as np\\nfrom scipy.optimize import curve_fit\\n\\nfrom qibocal.config import log\\nfrom qibocal.data import Data\\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\\n\\n\\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\\n \\\"\\\"\\\"Fitting routine for resonator spectroscopy\\\"\\\"\\\"\\n if fit_file_name == None:\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n else:\\n data_fit = Data(\\n name=fit_file_name + f\\\"_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n\\n frequencies = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n # Create a lmfit model for fitting equation defined in resonator_peak\\n model_Q = lmfit.Model(lorenzian)\\n\\n # Guess parameters for Lorentzian max or min\\n if (nqubits == 1 and labels[0] == \\\"resonator_freq\\\") or (\\n nqubits != 1 and labels[0] == \\\"qubit_freq\\\"\\n ):\\n guess_center = 
frequencies[\\n np.argmax(voltages)\\n ] # Argmax = Returns the indices of the maximum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\\n )\\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\\n\\n else:\\n guess_center = frequencies[\\n np.argmin(voltages)\\n ] # Argmin = Returns the indices of the minimum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\\n )\\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\\n\\n # Add guessed parameters to the model\\n model_Q.set_param_hint(\\\"center\\\", value=guess_center, vary=True)\\n model_Q.set_param_hint(\\\"sigma\\\", value=guess_sigma, vary=True)\\n model_Q.set_param_hint(\\\"amplitude\\\", value=guess_amp, vary=True)\\n model_Q.set_param_hint(\\\"offset\\\", value=guess_offset, vary=True)\\n guess_parameters = model_Q.make_params()\\n\\n # fit the model with the data and guessed parameters\\n try:\\n fit_res = model_Q.fit(\\n data=voltages, frequency=frequencies, params=guess_parameters\\n )\\n except:\\n log.warning(\\\"The fitting was not successful\\\")\\n return data_fit\\n\\n # get the values for postprocessing and for legend.\\n f0 = fit_res.best_values[\\\"center\\\"]\\n BW = fit_res.best_values[\\\"sigma\\\"] * 2\\n Q = abs(f0 / BW)\\n peak_voltage = (\\n fit_res.best_values[\\\"amplitude\\\"] / (fit_res.best_values[\\\"sigma\\\"] * np.pi)\\n + fit_res.best_values[\\\"offset\\\"]\\n )\\n\\n freq = f0 * 1e6\\n\\n data_fit.add(\\n {\\n labels[1]: peak_voltage,\\n labels[0]: freq,\\n \\\"fit_amplitude\\\": fit_res.best_values[\\\"amplitude\\\"],\\n \\\"fit_center\\\": fit_res.best_values[\\\"center\\\"],\\n \\\"fit_sigma\\\": fit_res.best_values[\\\"sigma\\\"],\\n \\\"fit_offset\\\": fit_res.best_values[\\\"offset\\\"],\\n }\\n )\\n return data_fit\\n\\n\\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"popt4\\\",\\n labels[0],\\n labels[1],\\n labels[2],\\n ],\\n )\\n\\n time = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n if nqubits == 1:\\n pguess = [\\n np.mean(voltages.values),\\n np.max(voltages.values) - np.min(voltages.values),\\n 0.5 / time.values[np.argmin(voltages.values)],\\n np.pi / 2,\\n 0.1e-6,\\n ]\\n else:\\n pguess = [\\n np.mean(voltages.values),\\n np.max(voltages.values) - np.min(voltages.values),\\n 0.5 / time.values[np.argmax(voltages.values)],\\n np.pi / 2,\\n 0.1e-6,\\n ]\\n try:\\n popt, pcov = curve_fit(\\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\\n )\\n smooth_dataset = rabi(time.values, *popt)\\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\\n rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6\\n t1 = 1.0 / popt[4] # double check T1\\n except:\\n log.warning(\\\"The fitting was not succesful\\\")\\n return data_fit\\n\\n data_fit.add(\\n {\\n \\\"popt0\\\": popt[0],\\n \\\"popt1\\\": popt[1],\\n \\\"popt2\\\": popt[2],\\n \\\"popt3\\\": popt[3],\\n \\\"popt4\\\": popt[4],\\n labels[0]: pi_pulse_duration,\\n labels[1]: rabi_oscillations_pi_pulse_max_voltage,\\n labels[2]: t1,\\n }\\n )\\n return data_fit\\n\\n\\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, 
labels):\\n\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n \\\"popt3\\\",\\n \\\"popt4\\\",\\n labels[0],\\n labels[1],\\n labels[2],\\n ],\\n )\\n\\n time = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n pguess = [\\n np.mean(voltages.values),\\n np.max(voltages.values) - np.min(voltages.values),\\n 0.5 / time.values[np.argmin(voltages.values)],\\n np.pi / 2,\\n 500e-9,\\n ]\\n\\n try:\\n popt, pcov = curve_fit(\\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\\n )\\n delta_fitting = popt[2]\\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\\n corrected_qubit_frequency = int(qubit_freq - delta_phys)\\n t2 = 1.0 / popt[4]\\n except:\\n log.warning(\\\"The fitting was not succesful\\\")\\n return data_fit\\n\\n data_fit.add(\\n {\\n \\\"popt0\\\": popt[0],\\n \\\"popt1\\\": popt[1],\\n \\\"popt2\\\": popt[2],\\n \\\"popt3\\\": popt[3],\\n \\\"popt4\\\": popt[4],\\n labels[0]: delta_phys,\\n labels[1]: corrected_qubit_frequency,\\n labels[2]: t2,\\n }\\n )\\n return data_fit\\n\\n\\ndef t1_fit(data, x, y, qubit, nqubits, labels):\\n\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"popt0\\\",\\n \\\"popt1\\\",\\n \\\"popt2\\\",\\n labels[0],\\n ],\\n )\\n\\n time = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n if nqubits == 1:\\n pguess = [\\n max(voltages.values),\\n (max(voltages.values) - min(voltages.values)),\\n 1 / 250,\\n ]\\n else:\\n pguess = [\\n min(voltages.values),\\n (max(voltages.values) - min(voltages.values)),\\n 1 / 250,\\n ]\\n\\n try:\\n popt, pcov = curve_fit(\\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\\n )\\n t1 = abs(1 / popt[2])\\n\\n except:\\n log.warning(\\\"The fitting was not succesful\\\")\\n return data_fit\\n\\n data_fit.add(\\n {\\n \\\"popt0\\\": popt[0],\\n \\\"popt1\\\": popt[1],\\n \\\"popt2\\\": popt[2],\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\n\\\"\\\"\\\"Routine-specific method for post-processing data acquired.\\\"\\\"\\\"\\nimport lmfit\\nimport numpy as np\\nfrom scipy.optimize import curve_fit\\n\\nfrom qibocal.config import log\\nfrom qibocal.data import Data\\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\\n\\n\\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\\n \\\"\\\"\\\"Fitting routine for resonator spectroscopy\\\"\\\"\\\"\\n if fit_file_name == None:\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n else:\\n data_fit = Data(\\n name=fit_file_name + f\\\"_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n\\n frequencies = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n # Create a lmfit model for fitting equation defined in resonator_peak\\n model_Q = lmfit.Model(lorenzian)\\n\\n # Guess parameters for Lorentzian max or min\\n if (nqubits == 1 and labels[0] == \\\"resonator_freq\\\") or (\\n nqubits != 1 and labels[0] == \\\"qubit_freq\\\"\\n ):\\n guess_center = frequencies[\\n np.argmax(voltages)\\n ] # Argmax = Returns the indices of the maximum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - 
np.mean(voltages) < np.std(voltages))]\\n )\\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\\n\\n else:\\n guess_center = frequencies[\\n np.argmin(voltages)\\n ] # Argmin = Returns the indices of the minimum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\\n )\\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\\n\\n # Add guessed parameters to the model\\n model_Q.set_param_hint(\\\"center\\\", value=guess_center, vary=True)\\n model_Q.set_param_hint(\\\"sigma\\\", value=guess_sigma, vary=True)\\n model_Q.set_param_hint(\\\"amplitude\\\", value=guess_amp, vary=True)\\n model_Q.set_param_hint(\\\"offset\\\", value=guess_offset, vary=True)\\n guess_parameters = model_Q.make_params()\\n\\n # fit the model with the data and guessed parameters\\n try:\\n fit_res = model_Q.fit(\\n data=voltages, frequency=frequencies, params=guess_parameters\\n )\\n except:\\n log.warning(\\\"The fitting was not successful\\\")\\n return data_fit\\n\\n # get the values for postprocessing and for legend.\\n f0 = fit_res.best_values[\\\"center\\\"]\\n BW = fit_res.best_values[\\\"sigma\\\"] * 2\\n Q = abs(f0 / BW)\\n peak_voltage = (\\n fit_res.best_values[\\\"amplitude\\\"] / (fit_res.best_values[\\\"sigma\\\"] * np.pi)\\n + fit_res.best_values[\\\"offset\\\"]\\n )\\n\\n freq = f0 * 1e6\\n\\n data_fit.add(\\n {\\n labels[1]: peak_voltage,\\n labels[0]: freq,\\n \\\"fit_amplitude\\\": fit_res.best_values[\\\"amplitude\\\"],\\n \\\"fit_center\\\": fit_res.best_values[\\\"center\\\"],\\n \\\"fit_sigma\\\": fit_res.best_values[\\\"sigma\\\"],\\n \\\"fit_offset\\\": fit_res.best_values[\\\"offset\\\"],\\n }\\n )\\n return data_fit\\n\\n\\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\n\\\"\\\"\\\"Routine-specific method for post-processing data acquired.\\\"\\\"\\\"\\nimport lmfit\\nimport numpy as np\\nfrom scipy.optimize import curve_fit\\n\\nfrom qibocal.config import log\\nfrom qibocal.data import Data\\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\\n\\n\\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\\n \\\"\\\"\\\"Fitting routine for resonator spectroscopy\\\"\\\"\\\"\\n if fit_file_name == None:\\n data_fit = Data(\\n name=f\\\"fit_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n else:\\n data_fit = Data(\\n name=fit_file_name + f\\\"_q{qubit}\\\",\\n quantities=[\\n \\\"fit_amplitude\\\",\\n \\\"fit_center\\\",\\n \\\"fit_sigma\\\",\\n \\\"fit_offset\\\",\\n labels[1],\\n labels[0],\\n ],\\n )\\n\\n frequencies = data.get_values(*parse(x))\\n voltages = data.get_values(*parse(y))\\n\\n # Create a lmfit model for fitting equation defined in resonator_peak\\n model_Q = lmfit.Model(lorenzian)\\n\\n # Guess parameters for Lorentzian max or min\\n if (nqubits == 1 and labels[0] == \\\"resonator_freq\\\") or (\\n nqubits != 1 and labels[0] == \\\"qubit_freq\\\"\\n ):\\n guess_center = frequencies[\\n np.argmax(voltages)\\n ] # Argmax = Returns the indices of the maximum values along an axis.\\n guess_offset = np.mean(\\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\\n )\\n 
# -*- coding: utf-8 -*-
"""Routine-specific methods for post-processing acquired data."""
import lmfit
import numpy as np
from scipy.optimize import curve_fit

from qibocal.config import log
from qibocal.data import Data
from qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey


def lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):
    """Fitting routine for resonator spectroscopy."""
    if fit_file_name is None:
        data_fit = Data(
            name=f"fit_q{qubit}",
            quantities=[
                "fit_amplitude",
                "fit_center",
                "fit_sigma",
                "fit_offset",
                labels[1],
                labels[0],
            ],
        )
    else:
        data_fit = Data(
            name=fit_file_name + f"_q{qubit}",
            quantities=[
                "fit_amplitude",
                "fit_center",
                "fit_sigma",
                "fit_offset",
                labels[1],
                labels[0],
            ],
        )

    frequencies = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    # Create an lmfit model for the line shape defined in resonator_peak.
    model_Q = lmfit.Model(lorenzian)

    # Guess parameters for a Lorentzian peak (maximum) or dip (minimum).
    if (nqubits == 1 and labels[0] == "resonator_freq") or (
        nqubits != 1 and labels[0] == "qubit_freq"
    ):
        # Peak: center the guess on the maximum voltage.
        guess_center = frequencies[np.argmax(voltages)]
        # Offset: mean of the voltages within one standard deviation of the mean.
        guess_offset = np.mean(
            voltages[np.abs(voltages - np.mean(voltages)) < np.std(voltages)]
        )
        guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)
        guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi
    else:
        # Dip: center the guess on the minimum voltage.
        guess_center = frequencies[np.argmin(voltages)]
        guess_offset = np.mean(
            voltages[np.abs(voltages - np.mean(voltages)) < np.std(voltages)]
        )
        guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)
        guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi

    # Add the guessed parameters to the model.
    model_Q.set_param_hint("center", value=guess_center, vary=True)
    model_Q.set_param_hint("sigma", value=guess_sigma, vary=True)
    model_Q.set_param_hint("amplitude", value=guess_amp, vary=True)
    model_Q.set_param_hint("offset", value=guess_offset, vary=True)
    guess_parameters = model_Q.make_params()

    # Fit the model to the data, starting from the guessed parameters.
    try:
        fit_res = model_Q.fit(
            data=voltages, frequency=frequencies, params=guess_parameters
        )
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    # Derive the values used for post-processing and for the legend.
    f0 = fit_res.best_values["center"]
    BW = fit_res.best_values["sigma"] * 2
    Q = abs(f0 / BW)
    peak_voltage = (
        fit_res.best_values["amplitude"] / (fit_res.best_values["sigma"] * np.pi)
        + fit_res.best_values["offset"]
    )

    freq = f0 * 1e6

    data_fit.add(
        {
            labels[1]: peak_voltage,
            labels[0]: freq,
            "fit_amplitude": fit_res.best_values["amplitude"],
            "fit_center": fit_res.best_values["center"],
            "fit_sigma": fit_res.best_values["sigma"],
            "fit_offset": fit_res.best_values["offset"],
        }
    )
    return data_fit
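

# Illustrative sketch: the same guess-then-fit pattern as lorentzian_fit, run on
# synthetic data.  The explicit line shape below is an assumption chosen so that
# its value at the center equals amplitude / (sigma * pi) + offset, matching the
# peak_voltage expression above; the production routine fits `lorenzian` from
# qibocal.fitting.utils instead, and all numbers here are made up.
def _example_lorentzian_fit_on_synthetic_data():
    def lorentzian(frequency, amplitude, center, sigma, offset):
        return (
            amplitude / np.pi * sigma / ((frequency - center) ** 2 + sigma**2)
            + offset
        )

    frequencies = np.linspace(7.390, 7.410, 201)  # frequency axis, arbitrary units
    rng = np.random.default_rng(0)
    voltages = lorentzian(
        frequencies, amplitude=2e-4, center=7.4005, sigma=0.0012, offset=0.003
    ) + rng.normal(0, 1e-4, frequencies.size)

    model = lmfit.Model(lorentzian)
    model.set_param_hint("center", value=frequencies[np.argmax(voltages)])
    model.set_param_hint("sigma", value=1e-3)
    model.set_param_hint("amplitude", value=1e-4)
    model.set_param_hint("offset", value=np.mean(voltages))
    fit = model.fit(data=voltages, frequency=frequencies, params=model.make_params())
    return fit.best_values["center"], fit.best_values["sigma"]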


def rabi_fit(data, x, y, qubit, nqubits, labels):
    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            "popt4",
            labels[0],
            labels[1],
            labels[2],
        ],
    )

    time = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    if nqubits == 1:
        pguess = [
            np.mean(voltages.values),
            np.max(voltages.values) - np.min(voltages.values),
            0.5 / time.values[np.argmin(voltages.values)],
            np.pi / 2,
            0.1e-6,
        ]
    else:
        pguess = [
            np.mean(voltages.values),
            np.max(voltages.values) - np.min(voltages.values),
            0.5 / time.values[np.argmax(voltages.values)],
            np.pi / 2,
            0.1e-6,
        ]
    try:
        popt, pcov = curve_fit(
            rabi, time.values, voltages.values, p0=pguess, maxfev=10000
        )
        smooth_dataset = rabi(time.values, *popt)
        # popt[2] is the fitted Rabi frequency: half a period is the pi pulse.
        pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)
        rabi_oscillations_pi_pulse_max_voltage = smooth_dataset.max() * 1e6
        t1 = 1.0 / popt[4]  # double check T1
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            "popt3": popt[3],
            "popt4": popt[4],
            labels[0]: pi_pulse_duration,
            labels[1]: rabi_oscillations_pi_pulse_max_voltage,
            labels[2]: t1,
        }
    )
    return data_fit
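

# Illustrative sketch: extracting the pi-pulse duration from a Rabi-style fit,
# as done in rabi_fit above (half of one oscillation period).  The damped-cosine
# model and the numbers below are assumptions for illustration only; the
# production routine fits `rabi` from qibocal.fitting.utils.
def _example_pi_pulse_duration_from_rabi_scan():
    def damped_cosine(t, offset, amplitude, frequency, phase, decay):
        return offset + amplitude * np.cos(
            2 * np.pi * frequency * t + phase
        ) * np.exp(-t / decay)

    t = np.linspace(0.0, 0.4, 200)  # pulse durations, arbitrary time unit
    rng = np.random.default_rng(1)
    v = damped_cosine(t, 0.5, 0.2, 10.0, 0.0, 1.0) + rng.normal(0, 0.005, t.size)

    # Same guessing strategy as rabi_fit: half a period at the first minimum.
    pguess = [np.mean(v), np.max(v) - np.min(v), 0.5 / t[np.argmin(v)], 0.0, 1.0]
    popt, _ = curve_fit(damped_cosine, t, v, p0=pguess, maxfev=10000)
    return np.abs((1.0 / popt[2]) / 2)  # pi-pulse duration, ~0.05 time units here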


def ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):
    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            "popt4",
            labels[0],
            labels[1],
            labels[2],
        ],
    )

    time = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    pguess = [
        np.mean(voltages.values),
        np.max(voltages.values) - np.min(voltages.values),
        0.5 / time.values[np.argmin(voltages.values)],
        np.pi / 2,
        500e-9,
    ]

    try:
        popt, pcov = curve_fit(
            ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000
        )
        delta_fitting = popt[2]
        delta_phys = int((delta_fitting * sampling_rate) - offset_freq)
        corrected_qubit_frequency = int(qubit_freq - delta_phys)
        t2 = 1.0 / popt[4]
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            "popt3": popt[3],
            "popt4": popt[4],
            labels[0]: delta_phys,
            labels[1]: corrected_qubit_frequency,
            labels[2]: t2,
        }
    )
    return data_fit
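

# Illustrative sketch of the detuning arithmetic used in ramsey_fit: the fitted
# oscillation frequency is scaled to physical units with the sampling rate, the
# deliberately applied detuning (offset_freq) is removed, and the remainder is
# subtracted from the current qubit frequency.  All numbers are made up.
def _example_ramsey_detuning_correction():
    qubit_freq = 5_000_000_000  # assumed qubit frequency, Hz
    sampling_rate = 1_000_000_000  # samples per second
    offset_freq = 2_000_000  # detuning applied on purpose, Hz
    delta_fitting = 0.0025  # fitted oscillation frequency, cycles per sample (assumed)

    delta_phys = int((delta_fitting * sampling_rate) - offset_freq)  # 500_000 Hz
    corrected_qubit_frequency = int(qubit_freq - delta_phys)  # 4_999_500_000 Hz
    return delta_phys, corrected_qubit_frequency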


def t1_fit(data, x, y, qubit, nqubits, labels):
    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            labels[0],
        ],
    )

    time = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    if nqubits == 1:
        pguess = [
            max(voltages.values),
            (max(voltages.values) - min(voltages.values)),
            1 / 250,
        ]
    else:
        pguess = [
            min(voltages.values),
            (max(voltages.values) - min(voltages.values)),
            1 / 250,
        ]

    try:
        popt, pcov = curve_fit(
            exp, time.values, voltages.values, p0=pguess, maxfev=2000000
        )
        t1 = abs(1 / popt[2])
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            labels[0]: t1,
        }
    )
    return data_fit
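

# Illustrative sketch: the exponential-decay fit used in t1_fit, with a locally
# defined model (an assumption; t1_fit uses `exp` from qibocal.fitting.utils)
# on synthetic data.  The third parameter is the decay rate, so T1 = |1 / popt[2]|.
def _example_t1_from_exponential_decay():
    def exp_decay(t, offset, amplitude, rate):
        return offset + amplitude * np.exp(-t * rate)

    t = np.linspace(0, 1000, 101)  # waiting times, arbitrary time unit
    rng = np.random.default_rng(2)
    v = exp_decay(t, 0.1, 0.6, 1 / 250) + rng.normal(0, 0.01, t.size)

    pguess = [np.min(v), np.max(v) - np.min(v), 1 / 250]
    popt, _ = curve_fit(exp_decay, t, v, p0=pguess, maxfev=2000000)
    return abs(1 / popt[2])  # ~250 in the same time unit as t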


def flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):
    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            labels[0],
            labels[1],
        ],
    )

    flips = data.get_values(*parse(x))  # Check what the X data stores: N flips or i?
    voltages = data.get_values(*parse(y))

    if nqubits == 1:
        pguess = [0.0003, np.mean(voltages), -18, 0]  # epsilon guess parameter
    else:
        pguess = [0.0003, np.mean(voltages), 18, 0]  # epsilon guess parameter

    try:
        popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)
        epsilon = -np.pi / popt[2]
        amplitude_delta = np.pi / (np.pi + epsilon)
        corrected_amplitude = amplitude_delta * pi_pulse_amplitude
        # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)
        # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude
    except Exception:
        log.warning("The fitting was not successful")
        return data_fit

    data_fit.add(
        {
            "popt0": popt[0],
            "popt1": popt[1],
            "popt2": popt[2],
            "popt3": popt[3],
            labels[0]: amplitude_delta,
            labels[1]: corrected_amplitude,
        }
    )
    return data_fit
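

# Illustrative sketch of the amplitude correction in flipping_fit: the fitted
# period parameter popt[2] gives the rotation error per flip (epsilon), and the
# pi-pulse amplitude is rescaled accordingly.  The numbers are made up.
def _example_flipping_amplitude_correction():
    pi_pulse_amplitude = 0.5  # current pi-pulse amplitude, arbitrary units
    popt2 = -18.0  # fitted period parameter, as in the single-qubit guess above

    epsilon = -np.pi / popt2  # ~0.1745 rad over-rotation per flip
    amplitude_delta = np.pi / (np.pi + epsilon)  # ~0.947
    corrected_amplitude = amplitude_delta * pi_pulse_amplitude  # ~0.474
    return corrected_amplitude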


def drag_tunning_fit(data, x, y, qubit, nqubits, labels):
    data_fit = Data(
        name=f"fit_q{qubit}",
        quantities=[
            "popt0",
            "popt1",
            "popt2",
            "popt3",
            labels[0],
        ],
    )

    beta_params = data.get_values(*parse(x))
    voltages = data.get_values(*parse(y))

    pguess = [
        0,  # Offset: p[0]
        beta_params.values[np.argmax(voltages)]
        - beta_params.values[np.argmin(voltages)],  # Amplitude: p[1]
        4,  # Period: p[2]
        0.3,  # Phase: p[3]
    ]

    try:
        popt, pcov = curve_fit(cos, beta_params.values, voltages.values)
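

# ---------------------------------------------------------------------------
# Illustrative only: the same lmfit-based Lorentzian fit as lorentzian_fit
# above, run end-to-end on synthetic data so the flow can be checked without a
# qibocal Data object.  It uses the _lorenzian_sketch defined above; the
# frequency axis, noise level and "true" parameters are made up for the demo.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import lmfit
    import numpy as np

    rng = np.random.default_rng(0)
    frequencies = np.linspace(7.795, 7.805, 201)  # arbitrary frequency axis
    true_params = dict(amplitude=2e-3, center=7.8, sigma=4e-4, offset=1e-3)
    voltages = _lorenzian_sketch(frequencies, **true_params) + rng.normal(
        0.0, 2e-5, frequencies.size
    )

    # Build the model, seed it with simple guesses and fit, mirroring the steps
    # in lorentzian_fit (set_param_hint / make_params / fit).
    model = lmfit.Model(_lorenzian_sketch)
    params = model.make_params(
        amplitude=1e-3,
        center=frequencies[np.argmax(voltages)],
        sigma=1e-3,
        offset=float(np.mean(voltages)),
    )
    fit_res = model.fit(data=voltages, frequency=frequencies, params=params)

    f0 = fit_res.best_values["center"]
    peak_voltage = (
        fit_res.best_values["amplitude"] / (fit_res.best_values["sigma"] * np.pi)
        + fit_res.best_values["offset"]
    )
    print(f"fitted center = {f0:.6f}, peak voltage = {peak_voltage:.6e}")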
lmfit.Model(lorenzian)\",\n \" time = data.get_values(*parse(x))\",\n \" ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\",\n \" beta_params = data.get_values(*parse(x))\",\n \" popt, pcov = curve_fit(cos, beta_params.values, voltages.values)\",\n \" smooth_dataset = cos(beta_params.values, popt[0], popt[1], popt[2], popt[3])\",\n \" exp, time.values, voltages.values, p0=pguess, maxfev=2000000\",\n \" t1 = abs(1 / popt[2])\",\n \"\",\n \" # Create a lmfit model for fitting equation defined in resonator_peak\",\n \" )\",\n \" pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\",\n \" corrected_qubit_frequency = int(qubit_freq - delta_phys)\",\n \" {\",\n \" else:\",\n \" quantities=[\",\n \" try:\"\n]"},"metainfo_separator":{"kind":"string","value":" METASEP\n"}}},{"rowIdx":209,"cells":{"repo_id":{"kind":"number","value":16,"string":"16"},"repo_name":{"kind":"string","value":"qiboteam__qibocal"},"project_context":{"kind":"string","value":"qiboteam__qibocal METASEP\n\ndoc/source/conf.py METASEP\n# -*- coding: utf-8 -*-\n# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\nfrom recommonmark.transform import AutoStructify\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nimport qcvv\n\n# -- Project information -----------------------------------------------------\n\nproject = \"qcvv\"\ncopyright = \"2022, The Qibo team\"\nauthor = \"The Qibo team\"\n\n# The full version, including alpha/beta/rc tags\nrelease = qcvv.__version__\n\n\n# -- General configuration ---------------------------------------------------\n\n# https://stackoverflow.com/questions/56336234/build-fail-sphinx-error-contents-rst-not-found\nmaster_doc = \"index\"\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.doctest\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"recommonmark\",\n \"sphinx_markdown_tables\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# Markdown configuration\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\nsource_suffix = {\".rst\": \"restructuredtext\", \".txt\": \"markdown\", \".md\": \"markdown\"}\n\nautosectionlabel_prefix_document = True\n# Allow to embed rst syntax in markdown files.\nenable_eval_rst = True\n\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = []\n\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. 
See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx_rtd_theme\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = []\n\n# -- Intersphinx -------------------------------------------------------------\n\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\n\n# -- Autodoc ------------------------------------------------------------------\n#\nautodoc_member_order = \"bysource\"\n\n# Adapted this from\n# https://github.com/readthedocs/recommonmark/blob/ddd56e7717e9745f11300059e4268e204138a6b1/docs/conf.py\n# app setup hook\ndef setup(app):\n app.add_config_value(\"recommonmark_config\", {\"enable_eval_rst\": True}, True)\n app.add_transform(AutoStructify)\n\nserverscripts/qcvv-update-on-change.py METASEP\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport argparse\n\nimport curio\nimport inotify.adapters\nimport inotify.constants\nfrom curio import subprocess\n\n\nasync def main(folder, exe_args):\n i = inotify.adapters.Inotify()\n i.add_watch(folder)\n\n for event in i.event_gen(yield_nones=False):\n if event is not None:\n (header, _, _, _) = event\n if (\n (header.mask & inotify.constants.IN_CREATE)\n or (header.mask & inotify.constants.IN_DELETE)\n or (header.mask & inotify.constants.IN_MODIFY)\n ):\n await subprocess.run(exe_args)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"folder\")\n parser.add_argument(\"exe_args\", nargs=\"+\")\n args = parser.parse_args()\n curio.run(main(args.folder, args.exe_args))\n\nserverscripts/qcvv-index.reports.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"qcvv-index-reports.py\nGenerates a JSON index with reports information.\n\"\"\"\nimport json\nimport pathlib\nimport sys\nfrom collections import ChainMap\n\nimport yaml\n\nROOT = \"/home/users/qcvv/qcvv-reports\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\nOUT = \"/home/users/qcvv/qcvv-reports/index.json\"\nDEFAULTS = {\n \"title\": \"-\",\n \"date\": \"-\",\n \"platform\": \"-\",\n \"start-time\": \"-\",\n \"end-time\": \"-\",\n}\nREQUIRED_FILE_METADATA = {\"title\", \"date\", \"platform\", \"start-time\" \"end-time\"}\n\n\ndef meta_from_path(p):\n meta = ChainMap(DEFAULTS)\n yaml_meta = p / \"meta.yml\"\n yaml_res = {}\n if yaml_meta.exists():\n with yaml_meta.open() as f:\n try:\n yaml_res = yaml.safe_load(f)\n except yaml.YAMLError as e:\n print(f\"Error processing {yaml_meta}: {e}\", file=sys.stderr)\n meta = meta.new_child(yaml_res)\n return meta\n\n\ndef register(p):\n path_meta = meta_from_path(p)\n title, date, platform, start_time, end_time = (\n path_meta[\"title\"],\n path_meta[\"date\"],\n path_meta[\"platform\"],\n path_meta[\"start-time\"],\n path_meta[\"end-time\"],\n )\n url = ROOT_URL + p.name\n titlelink = f'{title}'\n return (titlelink, date, platform, start_time, end_time)\n\n\ndef make_index():\n root_path = pathlib.Path(ROOT)\n data = []\n for p in root_path.iterdir():\n if p.is_dir():\n try:\n res = register(p)\n data.append(res)\n except:\n print(\"Error processing folder\", p, file=sys.stderr)\n raise\n\n with open(OUT, \"w\") as f:\n json.dump({\"data\": data}, f)\n\n\nif __name__ == \"__main__\":\n make_index()\n\nsrc/qcvv/web/server.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nimport yaml\nfrom flask import Flask, render_template\n\nfrom qcvv 
import __version__\nfrom qcvv.cli.builders import ReportBuilder\n\nserver = Flask(__name__)\n\n\n@server.route(\"/\")\n@server.route(\"/data/\")\ndef page(path=None):\n folders = [\n folder\n for folder in reversed(sorted(os.listdir(os.getcwd())))\n if os.path.isdir(folder) and \"meta.yml\" in os.listdir(folder)\n ]\n\n report = None\n if path is not None:\n try:\n report = ReportBuilder(path)\n except (FileNotFoundError, TypeError):\n pass\n\n return render_template(\n \"template.html\",\n version=__version__,\n folders=folders,\n report=report,\n )\n\nsrc/qcvv/web/report.py METASEP\n# -*- coding: utf-8 -*-\nimport os\nimport pathlib\n\nfrom jinja2 import Environment, FileSystemLoader\n\nfrom qcvv import __version__\nfrom qcvv.cli.builders import ReportBuilder\n\n\ndef create_report(path):\n \"\"\"Creates an HTML report for the data in the given path.\"\"\"\n filepath = pathlib.Path(__file__)\n\n with open(os.path.join(filepath.with_name(\"static\"), \"styles.css\"), \"r\") as file:\n css_styles = f\"\"\n\n report = ReportBuilder(path)\n env = Environment(loader=FileSystemLoader(filepath.with_name(\"templates\")))\n template = env.get_template(\"template.html\")\n\n html = template.render(\n is_static=True,\n css_styles=css_styles,\n version=__version__,\n report=report,\n )\n\n with open(os.path.join(path, \"index.html\"), \"w\") as file:\n file.write(html)\n\nsrc/qcvv/web/app.py METASEP\n# -*- coding: utf-8 -*-\nimport os\n\nimport pandas as pd\nimport yaml\nfrom dash import Dash, Input, Output, dcc, html\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.web.server import server\n\nDataset() # dummy dataset call to suppress ``pint[V]`` error\n\napp = Dash(\n server=server,\n suppress_callback_exceptions=True,\n)\n\napp.layout = html.Div(\n [\n dcc.Location(id=\"url\", refresh=False),\n dcc.Graph(id=\"graph\", figure={}),\n dcc.Interval(\n id=\"interval\",\n # TODO: Perhaps the user should be allowed to change the refresh rate\n interval=1000,\n n_intervals=0,\n disabled=False,\n ),\n ]\n)\n\n\n@app.callback(\n Output(\"graph\", \"figure\"),\n Input(\"interval\", \"n_intervals\"),\n Input(\"graph\", \"figure\"),\n Input(\"url\", \"pathname\"),\n)\ndef get_graph(n, current_figure, url):\n method, folder, routine, qubit, format = url.split(os.sep)[2:]\n try:\n # data = Dataset.load_data(folder, routine, format, \"precision_sweep\")\n # with open(f\"{folder}/platform.yml\", \"r\") as f:\n # nqubits = yaml.safe_load(f)[\"nqubits\"]\n # if len(data) > 2:\n # params, fit = resonator_spectroscopy_fit(folder, format, nqubits)\n # else:\n # params, fit = None, None\n # return getattr(plots.resonator_spectroscopy, method)(data, params, fit)\n\n # # FIXME: Temporarily hardcode the plotting method to test\n # # multiple routines with different names in one folder\n # # should be changed to:\n # # return getattr(getattr(plots, routine), method)(data)\n\n return getattr(plots, method)(folder, routine, qubit, format)\n except (FileNotFoundError, pd.errors.EmptyDataError):\n return current_figure\n\nsrc/qcvv/web/__init__.py METASEP\n\nsrc/qcvv/tests/test_data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Some tests for the Dataset class\"\"\"\nimport tempfile\n\nimport numpy as np\nimport pytest\nfrom pint import DimensionalityError, UndefinedUnitError\n\nfrom qcvv.data import Dataset\n\n\ndef random_dataset(length):\n data = Dataset()\n for _ in range(length):\n msr, i, q, phase = np.random.rand(len(data.df.columns))\n data.add({\"MSR[V]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": 
phase})\n return data\n\n\ndef test_data_initialization():\n \"\"\"Test Dataset constructor\"\"\"\n data = Dataset()\n assert len(data.df.columns) == 4\n assert list(data.df.columns) == [\"MSR\", \"i\", \"q\", \"phase\"]\n\n data1 = Dataset(quantities={\"attenuation\": \"dB\"})\n assert len(data1.df.columns) == 5\n assert list(data1.df.columns) == [\"attenuation\", \"MSR\", \"i\", \"q\", \"phase\"]\n\n\ndef test_units():\n \"\"\"Test units of measure in Dataset\"\"\"\n data = Dataset()\n assert data.df.MSR.values.units == \"volt\"\n\n data1 = Dataset(quantities={\"frequency\": \"Hz\"})\n assert data1.df.frequency.values.units == \"hertz\"\n\n with pytest.raises(UndefinedUnitError):\n data2 = Dataset(quantities={\"fake_unit\": \"fake\"})\n\n\ndef test_add():\n \"\"\"Test add method of Dataset\"\"\"\n data = random_dataset(5)\n assert len(data) == 5\n\n data1 = Dataset(quantities={\"attenuation\": \"dB\"})\n msr, i, q, phase, att = np.random.rand(len(data1.df.columns))\n data1.add(\n {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"attenuation[dB]\": att,\n }\n )\n assert len(data1) == 1\n\n data1.add(\n {\n \"MSR[V]\": 0,\n \"i[V]\": 0.0,\n \"q[V]\": 0.0,\n \"phase[deg]\": 0,\n \"attenuation[dB]\": 1,\n }\n )\n assert len(data1) == 2\n\n data2 = Dataset()\n msr, i, q, phase = np.random.rand(len(data2.df.columns))\n with pytest.raises(DimensionalityError):\n data2.add({\"MSR[dB]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n with pytest.raises(UndefinedUnitError):\n data2.add({\"MSR[test]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\nsrc/qcvv/plots/scatters.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qcvv.data import Data, Dataset\nfrom qcvv.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = Dataset.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = Dataset(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = Dataset.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = Dataset(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 
20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.df[\"fit_amplitude\"][0],\n data_fit.df[\"fit_center\"][0],\n data_fit.df[\"fit_sigma\"][0],\n data_fit.df[\"fit_offset\"][0],\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\nsrc/qcvv/plots/heatmaps.py METASEP\n# -*- coding: utf-8 -*-\nimport os.path\n\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qcvv.data import Dataset\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase 
(rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = Dataset.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n )\n return fig\n\nsrc/qcvv/plots/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qcvv.plots.heatmaps import *\nfrom qcvv.plots.scatters import *\n\nsrc/qcvv/fitting/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport re\n\nimport numpy as np\n\n\ndef lorenzian(frequency, amplitude, center, sigma, offset):\n # http://openafox.com/science/peak-function-derivations.html\n return (amplitude / np.pi) * (\n sigma / ((frequency - center) ** 2 + sigma**2)\n ) + offset\n\n\ndef parse(key):\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n return name, unit\n\nsrc/qcvv/fitting/methods.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\n\nfrom qcvv.config import log\nfrom qcvv.data import Data\nfrom qcvv.fitting.utils import lorenzian, parse\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"fit_amplitude\",\n \"fit_center\",\n \"fit_sigma\",\n \"fit_offset\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n 
guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e6\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"fit_amplitude\": fit_res.best_values[\"amplitude\"],\n \"fit_center\": fit_res.best_values[\"center\"],\n \"fit_sigma\": fit_res.best_values[\"sigma\"],\n \"fit_offset\": fit_res.best_values[\"offset\"],\n }\n )\n return data_fit\n\n # params = resonator_freq, peak_voltage\n\n # for keys in fit_res.best_values:\n # fit_res.best_values[keys] = float(fit_res.best_values[keys])\n\n # with open(f\"{folder}/data/resonator_spectroscopy/fit.yml\", \"w+\") as file:\n # yaml.dump(\n # fit_res.best_values,\n # file,\n # sort_keys=False,\n # indent=4,\n # default_flow_style=None,\n # )\n\n # return params, fit_res.best_values\n\nsrc/qcvv/fitting/__init__.py METASEP\n\nsrc/qcvv/cli/builders.py METASEP\n# -*- coding: utf-8 -*-\nimport datetime\nimport inspect\nimport os\nimport shutil\n\nimport yaml\n\nfrom qcvv import calibrations\nfrom qcvv.config import log, raise_error\nfrom qcvv.data import Data\n\n\ndef load_yaml(path):\n \"\"\"Load yaml file from disk.\"\"\"\n with open(path, \"r\") as file:\n data = yaml.safe_load(file)\n return data\n\n\nclass ActionBuilder:\n \"\"\"Class for parsing and executing runcards.\n\n Args:\n runcard (path): path containing the runcard.\n folder (path): path for the output folder.\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n\n def __init__(self, runcard, folder=None, force=False):\n path, self.folder = self._generate_output_folder(folder, force)\n self.runcard = load_yaml(runcard)\n platform_name = self.runcard[\"platform\"]\n self._allocate_platform(platform_name)\n self.qubits = self.runcard[\"qubits\"]\n self.format = self.runcard[\"format\"]\n\n # Saving runcard\n self.save_runcards(path, runcard)\n self.save_meta(path, self.folder, platform_name)\n\n @staticmethod\n def _generate_output_folder(folder, force):\n \"\"\"Static method for generating the output folder.\n\n Args:\n folder (path): path for the output folder. 
If None it will be created a folder automatically\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n if folder is None:\n import getpass\n\n e = datetime.datetime.now()\n user = getpass.getuser().replace(\".\", \"-\")\n date = e.strftime(\"%Y-%m-%d\")\n folder = f\"{date}-{'000'}-{user}\"\n num = 0\n while os.path.exists(folder):\n log.warning(f\"Directory {folder} already exists.\")\n num += 1\n folder = f\"{date}-{str(num).rjust(3, '0')}-{user}\"\n log.warning(f\"Trying to create directory {folder}\")\n elif os.path.exists(folder) and not force:\n raise_error(RuntimeError, f\"Directory {folder} already exists.\")\n elif os.path.exists(folder) and force:\n log.warning(f\"Deleting previous directory {folder}.\")\n shutil.rmtree(os.path.join(os.getcwd(), folder))\n\n path = os.path.join(os.getcwd(), folder)\n log.info(f\"Creating directory {folder}.\")\n os.makedirs(path)\n return path, folder\n\n def _allocate_platform(self, platform_name):\n \"\"\"Allocate the platform using Qibolab.\"\"\"\n from qibo.backends import construct_backend\n\n self.platform = construct_backend(\"qibolab\", platform=platform_name).platform\n\n def save_runcards(self, path, runcard):\n \"\"\"Save the output runcards.\"\"\"\n from qibolab.paths import qibolab_folder\n\n platform_runcard = (\n qibolab_folder / \"runcards\" / f\"{self.runcard['platform']}.yml\"\n )\n shutil.copy(platform_runcard, f\"{path}/platform.yml\")\n shutil.copy(runcard, f\"{path}/runcard.yml\")\n\n def save_meta(self, path, folder, platform_name):\n import qibo\n import qibolab\n\n import qcvv\n\n e = datetime.datetime.now(datetime.timezone.utc)\n meta = {}\n meta[\"title\"] = folder\n meta[\"platform\"] = platform_name\n meta[\"date\"] = e.strftime(\"%Y-%m-%d\")\n meta[\"start-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"versions\"] = {\n \"qibo\": qibo.__version__,\n \"qibolab\": qibolab.__version__,\n \"qcvv\": qcvv.__version__,\n }\n with open(f\"{path}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n def _build_single_action(self, name):\n \"\"\"Helper method to parse the actions in the runcard.\"\"\"\n f = getattr(calibrations, name)\n path = os.path.join(self.folder, f\"data/{name}/\")\n os.makedirs(path)\n sig = inspect.signature(f)\n params = self.runcard[\"actions\"][name]\n for param in list(sig.parameters)[2:-1]:\n if param not in params:\n raise_error(AttributeError, f\"Missing parameter {param} in runcard.\")\n return f, params, path\n\n def execute(self):\n \"\"\"Method to execute sequentially all the actions in the runcard.\"\"\"\n self.platform.connect()\n self.platform.setup()\n self.platform.start()\n for action in self.runcard[\"actions\"]:\n routine, args, path = self._build_single_action(action)\n self._execute_single_action(routine, args, path)\n self.platform.stop()\n self.platform.disconnect()\n\n def _execute_single_action(self, routine, arguments, path):\n \"\"\"Method to execute a single action and retrieving the results.\"\"\"\n for qubit in self.qubits:\n results = routine(self.platform, qubit, **arguments)\n if self.format is None:\n raise_error(\n ValueError, f\"Cannot store data using {self.format} format.\"\n )\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n self.update_platform_runcard(qubit, routine.__name__)\n\n def update_platform_runcard(self, qubit, routine):\n\n try:\n data_fit = Data.load_data(\n self.folder, routine, self.format, f\"fit_q{qubit}\"\n )\n except:\n data_fit = Data()\n\n 
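# ---------------------------------------------------------------------------
# Illustrative sketch (not part of qcvv): the generator-based acquisition pattern that
# ``ActionBuilder._execute_single_action`` consumes. ``FakeData`` and ``fake_routine``
# are hypothetical stand-ins; the real routines yield ``Dataset``/``Data`` objects and
# the builder serializes every yield so live plotting always sees fresh files.
import os
import tempfile


class FakeData:
    # minimal stand-in exposing the same ``add``/``to_csv`` surface used by the builder
    def __init__(self):
        self.rows = []

    def add(self, row):
        self.rows.append(row)

    def to_csv(self, path):
        with open(os.path.join(path, "data.csv"), "w") as f:
            for row in self.rows:
                f.write(",".join(str(v) for v in row.values()) + "\n")


def fake_routine(n_steps, points=10):
    data = FakeData()
    for step in range(n_steps):
        if step % points == 0 and step > 0:
            yield data  # partial results, picked up by the builder / live plotter
        data.add({"step": step, "value": step**2})
    yield data  # final, complete data


out = tempfile.mkdtemp()
for data in fake_routine(35):
    data.to_csv(out)  # mirrors ``getattr(data, f"to_{self.format}")(path)`` in the builder
# ---------------------------------------------------------------------------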
params = [i for i in list(data_fit.df.keys()) if \"fit\" not in i]\n settings = load_yaml(f\"{self.folder}/platform.yml\")\n\n for param in params:\n settings[\"characterization\"][\"single_qubit\"][qubit][param] = int(\n data_fit.df[param][0]\n )\n\n with open(f\"{self.folder}/data/{routine}/platform.yml\", \"a+\") as file:\n yaml.dump(\n settings, file, sort_keys=False, indent=4, default_flow_style=None\n )\n\n def dump_report(self):\n from qcvv.web.report import create_report\n\n # update end time\n meta = load_yaml(f\"{self.folder}/meta.yml\")\n e = datetime.datetime.now(datetime.timezone.utc)\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n with open(f\"{self.folder}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n create_report(self.folder)\n\n\nclass ReportBuilder:\n \"\"\"Parses routines and plots to report and live plotting page.\n\n Args:\n path (str): Path to the data folder to generate report for.\n \"\"\"\n\n def __init__(self, path):\n self.path = path\n self.metadata = load_yaml(os.path.join(path, \"meta.yml\"))\n\n # find proper path title\n base, self.title = os.path.join(os.getcwd(), path), \"\"\n while self.title in (\"\", \".\"):\n base, self.title = os.path.split(base)\n\n self.runcard = load_yaml(os.path.join(path, \"runcard.yml\"))\n self.format = self.runcard.get(\"format\")\n self.qubits = self.runcard.get(\"qubits\")\n\n # create calibration routine objects\n # (could be incorporated to :meth:`qcvv.cli.builders.ActionBuilder._build_single_action`)\n self.routines = []\n for action in self.runcard.get(\"actions\"):\n routine = getattr(calibrations, action)\n if not hasattr(routine, \"plots\"):\n routine.plots = []\n self.routines.append(routine)\n\n def get_routine_name(self, routine):\n \"\"\"Prettify routine's name for report headers.\"\"\"\n return routine.__name__.replace(\"_\", \" \").title()\n\n def get_figure(self, routine, method, qubit):\n \"\"\"Get html figure for report.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n import tempfile\n\n figure = method(self.path, routine.__name__, qubit, self.format)\n with tempfile.NamedTemporaryFile() as temp:\n figure.write_html(temp.name, include_plotlyjs=False, full_html=False)\n fightml = temp.read().decode(\"utf-8\")\n return fightml\n\n def get_live_figure(self, routine, method, qubit):\n \"\"\"Get url to dash page for live plotting.\n\n This url is used by :meth:`qcvv.web.app.get_graph`.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n return os.path.join(\n method.__name__,\n self.path,\n routine.__name__,\n str(qubit),\n self.format,\n )\n\nsrc/qcvv/cli/_base.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Adds global CLI options.\"\"\"\nimport base64\nimport pathlib\nimport shutil\nimport socket\nimport subprocess\nimport uuid\nfrom urllib.parse import urljoin\n\nimport click\nfrom qibo.config import log, raise_error\n\nfrom qcvv.cli.builders import ActionBuilder\n\nCONTEXT_SETTINGS = dict(help_option_names=[\"-h\", \"--help\"])\n\n# options for report upload\nUPLOAD_HOST = (\n \"qcvv@localhost\"\n if socket.gethostname() == \"saadiyat\"\n else \"qcvv@login.qrccluster.com\"\n)\nTARGET_DIR = \"qcvv-reports/\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"runcard\", metavar=\"RUNCARD\", type=click.Path(exists=True))\n@click.option(\n \"folder\",\n \"-o\",\n type=click.Path(),\n 
help=\"Output folder. If not provided a standard name will generated.\",\n)\n@click.option(\n \"force\",\n \"-f\",\n is_flag=True,\n help=\"Use --force option to overwrite the output folder.\",\n)\ndef command(runcard, folder, force=None):\n\n \"\"\"qcvv: Quantum Calibration Verification and Validation using Qibo.\n\n Arguments:\n\n - RUNCARD: runcard with declarative inputs.\n \"\"\"\n\n action_builder = ActionBuilder(runcard, folder, force)\n action_builder.execute()\n action_builder.dump_report()\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.option(\n \"port\",\n \"-p\",\n \"--port\",\n default=8050,\n type=int,\n help=\"Localhost port to launch dash server.\",\n)\n@click.option(\n \"debug\",\n \"-d\",\n \"--debug\",\n is_flag=True,\n help=\"Launch server in debugging mode.\",\n)\ndef live_plot(port, debug):\n \"\"\"Real time plotting of calibration data on a dash server.\"\"\"\n import socket\n\n from qcvv.web.app import app\n\n # change port if it is already used\n while True:\n with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n if s.connect_ex((\"localhost\", port)) != 0:\n break\n port += 1\n\n app.run_server(debug=debug, port=port)\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"output_folder\", metavar=\"FOLDER\", type=click.Path(exists=True))\ndef upload(output_folder):\n \"\"\"Uploads output folder to server\"\"\"\n\n output_path = pathlib.Path(output_folder)\n\n # check the rsync command exists.\n if not shutil.which(\"rsync\"):\n raise_error(\n RuntimeError,\n \"Could not find the rsync command. Please make sure it is installed.\",\n )\n\n # check that we can authentica with a certificate\n ssh_command_line = (\n \"ssh\",\n \"-o\",\n \"PreferredAuthentications=publickey\",\n \"-q\",\n UPLOAD_HOST,\n \"exit\",\n )\n\n str_line = \" \".join(repr(ele) for ele in ssh_command_line)\n\n log.info(f\"Checking SSH connection to {UPLOAD_HOST}.\")\n\n try:\n subprocess.run(ssh_command_line, check=True)\n except subprocess.CalledProcessError as e:\n raise RuntimeError(\n (\n \"Could not validate the SSH key. \"\n \"The command\\n%s\\nreturned a non zero exit status. \"\n \"Please make sure that your public SSH key is on the server.\"\n )\n % str_line\n ) from e\n except OSError as e:\n raise RuntimeError(\n \"Could not run the command\\n{}\\n: {}\".format(str_line, e)\n ) from e\n\n log.info(\"Connection seems OK.\")\n\n # upload output\n randname = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode()\n newdir = TARGET_DIR + randname\n\n rsync_command = (\n \"rsync\",\n \"-aLz\",\n \"--chmod=ug=rwx,o=rx\",\n f\"{output_path}/\",\n f\"{UPLOAD_HOST}:{newdir}\",\n )\n\n log.info(f\"Uploading output ({output_path}) to {UPLOAD_HOST}\")\n try:\n subprocess.run(rsync_command, check=True)\n except subprocess.CalledProcessError as e:\n msg = f\"Failed to upload output: {e}\"\n raise RuntimeError(msg) from e\n\n url = urljoin(ROOT_URL, randname)\n log.info(f\"Upload completed. 
The result is available at:\\n{url}\")\n\nsrc/qcvv/cli/__init__.py METASEP\n# -*- coding: utf-8 -*-\n\n\"\"\"CLI entry point.\"\"\"\nfrom ._base import command, live_plot, upload\n\nsrc/qcvv/decorators.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Decorators implementation.\"\"\"\nimport os\n\nfrom qcvv.config import raise_error\n\n\ndef plot(header, method):\n \"\"\"Decorator for adding plots in the report and live plotting page.\n\n Args:\n header (str): Header of the plot to use in the report.\n method (Callable): Plotting method defined under ``qcvv.plots``.\n \"\"\"\n\n def wrapped(f):\n if hasattr(f, \"plots\"):\n # insert in the beginning of the list to have\n # proper plot ordering in the report\n f.plots.insert(0, (header, method))\n else:\n f.plots = [(header, method)]\n return f\n\n return wrapped\n\nsrc/qcvv/data.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Implementation of Dataset class to store measurements.\"\"\"\n\nfrom abc import abstractmethod\n\nimport pandas as pd\nimport pint_pandas\n\nfrom qcvv.config import raise_error\n\n\nclass AbstractDataset:\n def __init__(self, name=None):\n\n if name is None:\n self.name = \"data\"\n else:\n self.name = name\n\n self.df = pd.DataFrame()\n\n def __add__(self, data):\n self.df = pd.concat([self.df, data.df], ignore_index=True)\n return self\n\n @abstractmethod\n def add(self, data):\n raise_error(NotImplementedError)\n\n def __len__(self):\n \"\"\"Computes the length of the dataset.\"\"\"\n return len(self.df)\n\n @abstractmethod\n def load_data(cls, folder, routine, format, name):\n raise_error(NotImplementedError)\n\n @abstractmethod\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n if self.quantities == None:\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n else:\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\n\nclass Dataset(AbstractDataset):\n \"\"\"Class to store the data measured during the calibration routines.\n It is a wrapper to a pandas DataFrame with units of measure from the Pint\n library.\n\n Args:\n quantities (dict): dictionary containing additional quantities that the user\n may save other than the pulse sequence output. 
The keys are the name of the\n quantities and the corresponding values are the units of measure.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n self.df = pd.DataFrame(\n {\n \"MSR\": pd.Series(dtype=\"pint[V]\"),\n \"i\": pd.Series(dtype=\"pint[V]\"),\n \"q\": pd.Series(dtype=\"pint[V]\"),\n \"phase\": pd.Series(dtype=\"pint[deg]\"),\n }\n )\n self.quantities = {\"MSR\": \"V\", \"i\": \"V\", \"q\": \"V\", \"phase\": \"deg\"}\n\n if quantities is not None:\n self.quantities.update(quantities)\n for name, unit in quantities.items():\n self.df.insert(0, name, pd.Series(dtype=f\"pint[{unit}]\"))\n\n def add(self, data):\n \"\"\"Add a row to dataset.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n import re\n\n from pint import UnitRegistry\n\n ureg = UnitRegistry()\n l = len(self)\n for key, value in data.items():\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n # TODO: find a better way to do this\n self.df.loc[l, name] = value * ureg(unit)\n\n def get_values(self, quantity, unit):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n unit (str): Unit of the returned values.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity].pint.to(unit).pint.magnitude\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. Possible choices are 'csv' and 'pickle'.\n\n Returns:\n dataset (``Dataset``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file, header=[0, 1])\n obj.df = obj.df.pint.quantify(level=-1)\n obj.df.pop(\"Unnamed: 0_level_0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n\nclass Data(AbstractDataset):\n \"\"\"Class to store the data obtained from calibration routines.\n It is a wrapper to a pandas DataFrame.\n\n Args:\n quantities (dict): dictionary quantities to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n if quantities is not None:\n self.quantities = quantities\n for name in quantities:\n self.df.insert(0, name, pd.Series(dtype=object))\n\n def add(self, data):\n \"\"\"Add a row to dataset.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n self.df.loc[l, key] = value\n\n def get_values(self, quantity):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity]\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n 
Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. Possible choices are 'csv' and 'pickle'.\n\n Returns:\n dataset (``Dataset``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file)\n obj.df.pop(\"Unnamed: 0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\nsrc/qcvv/config.py METASEP\n# -*- coding: utf-8 -*-\n\"\"\"Custom logger implemenation.\"\"\"\nimport logging\nimport os\n\n# Logging level from 0 (all) to 4 (errors) (see https://docs.python.org/3/library/logging.html#logging-levels)\nQCVV_LOG_LEVEL = 1\nif \"QCVV_LOG_LEVEL\" in os.environ: # pragma: no cover\n QCVV_LOG_LEVEL = 10 * int(os.environ.get(\"QCVV_LOG_LEVEL\"))\n\n\ndef raise_error(exception, message=None, args=None):\n \"\"\"Raise exception with logging error.\n\n Args:\n exception (Exception): python exception.\n message (str): the error message.\n \"\"\"\n log.error(message)\n if args:\n raise exception(message, args)\n else:\n raise exception(message)\n\n\n# Configuration for logging mechanism\nclass CustomHandler(logging.StreamHandler):\n \"\"\"Custom handler for logging algorithm.\"\"\"\n\n def format(self, record):\n \"\"\"Format the record with specific format.\"\"\"\n from qcvv import __version__\n\n fmt = f\"[Qcvv {__version__}|%(levelname)s|%(asctime)s]: %(message)s\"\n return logging.Formatter(fmt, datefmt=\"%Y-%m-%d %H:%M:%S\").format(record)\n\n\n# allocate logger object\nlog = logging.getLogger(__name__)\nlog.setLevel(QCVV_LOG_LEVEL)\nlog.addHandler(CustomHandler())\n\nsrc/qcvv/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom .cli import command, live_plot, upload\n\n\"\"\"qcvv: Quantum Calibration Verification and Validation using Qibo.\"\"\"\nimport importlib.metadata as im\n\n__version__ = im.version(__package__)\n\nsrc/qcvv/calibrations/utils.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\n\n\ndef variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n):\n \"\"\"Helper function for sweeps.\"\"\"\n return np.concatenate(\n (\n np.arange(-lowres_width, -highres_width, lowres_step),\n np.arange(-highres_width, highres_width, highres_step),\n np.arange(highres_width, lowres_width, lowres_step),\n )\n )\n\nsrc/qcvv/calibrations/resonator_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.calibrations.utils import variable_resolution_scanrange\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef resonator_spectroscopy(\n platform: AbstractPlatform,\n qubit,\n lowres_width,\n lowres_step,\n highres_width,\n highres_step,\n precision_width,\n 
precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n )\n + resonator_frequency\n )\n fast_sweep_data = Dataset(\n name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield fast_sweep_data\n yield lorentzian_fit(\n fast_sweep_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n fast_sweep_data.add(results)\n count += 1\n yield fast_sweep_data\n\n # FIXME: have live ploting work for multiple datasets saved\n\n if platform.resonator_type == \"3D\":\n resonator_frequency = fast_sweep_data.df.frequency[\n fast_sweep_data.df.MSR.index[fast_sweep_data.df.MSR.argmax()]\n ].magnitude\n avg_voltage = (\n np.mean(fast_sweep_data.df.MSR.values[: (lowres_width // lowres_step)])\n * 1e6\n )\n else:\n resonator_frequency = fast_sweep_data.df.frequency[\n fast_sweep_data.df.MSR.index[fast_sweep_data.df.MSR.argmin()]\n ].magnitude\n avg_voltage = (\n np.mean(fast_sweep_data.df.MSR.values[: (lowres_width // lowres_step)])\n * 1e6\n )\n\n precision_sweep__data = Dataset(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(-precision_width, precision_width, precision_step)\n + resonator_frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield precision_sweep__data\n yield lorentzian_fit(\n fast_sweep_data + precision_sweep__data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n precision_sweep__data.add(results)\n count += 1\n yield precision_sweep__data\n\n\n@plot(\"Frequency vs Attenuation\", plots.frequency_attenuation_msr_phase)\n@plot(\"MSR vs Frequency\", plots.frequency_attenuation_msr_phase__cut)\ndef resonator_punchout(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n min_att,\n max_att,\n step_att,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"}\n )\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence = PulseSequence()\n sequence.add(ro_pulse)\n\n # TODO: move this explicit instruction to the platform\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, 
freq_step)\n + resonator_frequency\n - (freq_width / 4)\n )\n attenuation_range = np.flip(np.arange(min_att, max_att, step_att))\n count = 0\n for _ in range(software_averages):\n for att in attenuation_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n # TODO: move these explicit instructions to the platform\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.ro_port[qubit].attenuation = att\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr * (np.exp(att / 10)),\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"attenuation[dB]\": att,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Flux Current\", plots.frequency_flux_msr_phase)\ndef resonator_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline=0,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n # TODO: automatically extract the sweet spot current\n # TODO: add a method to generate the matrix\n\n\n@plot(\"MSR row 1 and Phase row 2\", plots.frequency_flux_msr_phase__matrix)\ndef resonator_spectroscopy_flux_matrix(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n current_min,\n current_max,\n current_step,\n fluxlines,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = np.arange(current_min, current_max, current_step)\n\n count = 0\n for fluxline in fluxlines:\n fluxline = int(fluxline)\n print(fluxline)\n data = Dataset(\n name=f\"data_q{qubit}_f{fluxline}\",\n quantities={\"frequency\": \"Hz\", \"current\": \"A\"},\n )\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n 
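# ---------------------------------------------------------------------------
# Illustrative sketch (not part of qcvv): assembling the 2D sweep grids that the flux
# and punchout routines above walk through. The numerical values are hypothetical; the
# helper mirrors ``qcvv.calibrations.utils.variable_resolution_scanrange`` shown earlier.
import numpy as np


def variable_resolution_scanrange(lowres_width, lowres_step, highres_width, highres_step):
    # dense sampling close to the expected resonance, coarse sampling further away
    return np.concatenate(
        (
            np.arange(-lowres_width, -highres_width, lowres_step),
            np.arange(-highres_width, highres_width, highres_step),
            np.arange(highres_width, lowres_width, lowres_step),
        )
    )


resonator_frequency = 7.4e9  # Hz, hypothetical value from the platform characterization
frequency_range = variable_resolution_scanrange(5e6, 1e6, 1e6, 0.1e6) + resonator_frequency
current_range = np.arange(-0.01, 0.01, 0.001)  # A, hypothetical flux-bias sweep

points = 10
count = 0
for curr in current_range:
    for freq in frequency_range:
        if count % points == 0:
            pass  # the real routines ``yield data`` here so partial results reach the plotter
        # the real routines would now set the LO frequency / flux current and measure
        count += 1
# ---------------------------------------------------------------------------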
platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qcvv/calibrations/qubit_spectroscopy.py METASEP\n# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef qubit_spectroscopy(\n platform: AbstractPlatform,\n qubit,\n fast_start,\n fast_end,\n fast_step,\n precision_start,\n precision_end,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n freqrange = np.arange(fast_start, fast_end, fast_step) + qubit_frequency\n\n data = Dataset(quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"})\n\n # FIXME: Waiting for Qblox platform to take care of that\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n\n data = Dataset(name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield data\n yield lorentzian_fit(\n data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data.add(results)\n count += 1\n yield data\n\n if platform.resonator_type == \"3D\":\n qubit_frequency = data.df.frequency[\n data.df.MSR.index[data.df.MSR.argmin()]\n ].magnitude\n avg_voltage = (\n np.mean(data.df.MSR.values[: ((fast_end - fast_start) // fast_step)]) * 1e6\n )\n else:\n qubit_frequency = data.df.frequency[\n data.df.MSR.index[data.df.MSR.argmax()]\n ].magnitude\n avg_voltage = (\n np.mean(data.df.MSR.values[: ((fast_end - fast_start) // fast_step)]) * 1e6\n )\n\n prec_data = Dataset(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(precision_start, precision_end, precision_step) + qubit_frequency\n )\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield prec_data\n yield lorentzian_fit(\n data + prec_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": 
i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n prec_data.add(results)\n count += 1\n yield prec_data\n # TODO: Estimate avg_voltage correctly\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_flux_msr_phase)\ndef qubit_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = np.arange(-freq_width, freq_width, freq_step) + qubit_frequency\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qcvv/calibrations/__init__.py METASEP\n# -*- coding: utf-8 -*-\nfrom qcvv.calibrations.qubit_spectroscopy import *\nfrom qcvv.calibrations.resonator_spectroscopy import *\n\nsrc/qcvv/calibrations/rabi_oscillations.py METASEP\n"},"file_context":{"kind":"list like","value":[{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n","type":"inproject"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n 
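
# ---------------------------------------------------------------------------
# Illustrative usage sketch (assumption: this driver is not part of the
# package). Every calibration routine in the modules above is a generator that
# yields a partial Dataset (and, for some routines, a fit result) every
# `points` acquisitions, so a caller can live-plot or save data while a sweep
# is still running. A minimal manual driver could look like the commented
# sketch below; `platform` is assumed to be an already-connected
# AbstractPlatform instance and the sweep values are purely illustrative.
#
#     from qcvv.calibrations import resonator_spectroscopy_flux
#
#     for partial in resonator_spectroscopy_flux(
#         platform,
#         qubit=0,
#         freq_width=5_000_000,   # Hz, illustrative
#         freq_step=100_000,      # Hz, illustrative
#         current_max=0.01,       # A, illustrative
#         current_min=-0.01,      # A, illustrative
#         current_step=0.001,     # A, illustrative
#         software_averages=1,
#         points=10,
#     ):
#         latest = partial  # persist or live-plot the partial Dataset here
# ---------------------------------------------------------------------------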

src/qcvv/calibrations/rabi_oscillations.py METASEP
# -*- coding: utf-8 -*-
import numpy as np
from qibolab.platforms.abstract import AbstractPlatform
from qibolab.pulses import PulseSequence

from qcvv import plots
from qcvv.data import Dataset
from qcvv.decorators import plot
from qcvv.fitting.methods import rabi_fit


@plot("MSR vs Time", plots.time_msr_phase)
def rabi_pulse_length(
    platform: AbstractPlatform,
    qubit,
    pulse_duration_start,
    pulse_duration_end,
    pulse_duration_step,
    software_averages,
    points=10,
):
    platform.reload_settings()

    data = Dataset(name=f"data_q{qubit}", quantities={"Time": "ns"})

    sequence = PulseSequence()
    qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)
    ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)
    sequence.add(qd_pulse)
    sequence.add(ro_pulse)

    qd_pulse_duration_range = np.arange(
        pulse_duration_start, pulse_duration_end, pulse_duration_step
    )

    # FIXME: Waiting to be able to pass qpucard to qibolab
    platform.ro_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["resonator_freq"]
        - ro_pulse.frequency
    )
    platform.qd_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["qubit_freq"]
        - qd_pulse.frequency
    )

    count = 0
    for _ in range(software_averages):
        for duration in qd_pulse_duration_range:
            qd_pulse.duration = duration
            ro_pulse.start = duration
            if count % points == 0 and count > 0:
                yield data
                yield rabi_fit(
                    data,
                    x="Time[ns]",
                    y="MSR[uV]",
                    qubit=qubit,
                    nqubits=platform.settings["nqubits"],
                    labels=[
                        "pi_pulse_duration",
                        "rabi_oscillations_pi_pulse_max_voltage",
                        "t1",
                    ],
                )
            msr, phase, i, q = platform.execute_pulse_sequence(sequence)[
                ro_pulse.serial
            ]
            results = {
                "MSR[V]": msr,
                "i[V]": i,
                "q[V]": q,
                "phase[rad]": phase,
                "Time[ns]": duration,
            }
            data.add(results)
            count += 1
    yield data


@plot("MSR vs Gain", plots.gain_msr_phase)
def rabi_pulse_gain(
    platform: AbstractPlatform,
    qubit,
    pulse_gain_start,
    pulse_gain_end,
    pulse_gain_step,
    software_averages,
    points=10,
):
    platform.reload_settings()

    data = Dataset(name=f"data_q{qubit}", quantities={"gain": "dimensionless"})

    sequence = PulseSequence()
    qd_pulse = platform.create_RX_pulse(qubit, start=0)
    ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)
    sequence.add(qd_pulse)
    sequence.add(ro_pulse)

    qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)

    # FIXME: Waiting to be able to pass qpucard to qibolab
    platform.ro_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["resonator_freq"]
        - ro_pulse.frequency
    )
    platform.qd_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["qubit_freq"]
        - qd_pulse.frequency
    )

    count = 0
    for _ in range(software_averages):
        for gain in qd_pulse_gain_range:
            platform.qd_port[qubit].gain = gain
            if count % points == 0 and count > 0:
                yield data
                yield rabi_fit(
                    data,
                    x="gain[dimensionless]",
                    y="MSR[uV]",
                    qubit=qubit,
                    nqubits=platform.settings["nqubits"],
                    labels=[
                        "pi_pulse_gain",
                        "rabi_oscillations_pi_pulse_max_voltage",
                        "t1",
                    ],
                )
            msr, phase, i, q = platform.execute_pulse_sequence(sequence)[
                ro_pulse.serial
            ]
            results = {
                "MSR[V]": msr,
                "i[V]": i,
                "q[V]": q,
                "phase[rad]": phase,
                "gain[dimensionless]": gain,
            }
            data.add(results)
            count += 1
    yield data


@plot("MSR vs Amplitude", plots.amplitude_msr_phase)
def rabi_pulse_amplitude(
    platform,
    qubit,
    pulse_amplitude_start,
    pulse_amplitude_end,
    pulse_amplitude_step,
    software_averages,
    points=10,
):
    platform.reload_settings()

    data = Dataset(name=f"data_q{qubit}", quantities={"amplitude": "dimensionless"})

    sequence = PulseSequence()
    qd_pulse = platform.create_RX_pulse(qubit, start=0)
    ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)
    sequence.add(qd_pulse)
    sequence.add(ro_pulse)

    qd_pulse_amplitude_range = np.arange(
        pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step
    )

    # FIXME: Waiting to be able to pass qpucard to qibolab
    platform.ro_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["resonator_freq"]
        - ro_pulse.frequency
    )
    platform.qd_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["qubit_freq"]
        - qd_pulse.frequency
    )

    count = 0
    for _ in range(software_averages):
        for amplitude in qd_pulse_amplitude_range:
            qd_pulse.amplitude = amplitude
            if count % points == 0 and count > 0:
                yield data
                yield rabi_fit(
                    data,
                    x="amplitude[dimensionless]",
                    y="MSR[uV]",
                    qubit=qubit,
                    nqubits=platform.settings["nqubits"],
                    labels=[
                        "pi_pulse_amplitude",
                        "rabi_oscillations_pi_pulse_max_voltage",
                        "t1",
                    ],
                )
            msr, phase, i, q = platform.execute_pulse_sequence(sequence)[
                ro_pulse.serial
            ]
            results = {
                "MSR[V]": msr,
                "i[V]": i,
                "q[V]": q,
                "phase[rad]": phase,
                "amplitude[dimensionless]": amplitude,
            }
            data.add(results)
            count += 1
    yield data


@plot("MSR vs length and gain", plots.duration_gain_msr_phase)
def rabi_pulse_length_and_gain(
    platform: AbstractPlatform,
    qubit,
    pulse_duration_start,
    pulse_duration_end,
    pulse_duration_step,
    pulse_gain_start,
    pulse_gain_end,
    pulse_gain_step,
    software_averages,
    points=10,
):
    platform.reload_settings()

    data = Dataset(
        name=f"data_q{qubit}", quantities={"duration": "ns", "gain": "dimensionless"}
    )

    sequence = PulseSequence()
    qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)
    ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)
    sequence.add(qd_pulse)
    sequence.add(ro_pulse)

    qd_pulse_duration_range = np.arange(
        pulse_duration_start, pulse_duration_end, pulse_duration_step
    )
    qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)

    # FIXME: Waiting to be able to pass qpucard to qibolab
    platform.ro_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["resonator_freq"]
        - ro_pulse.frequency
    )
    platform.qd_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["qubit_freq"]
        - qd_pulse.frequency
    )

    count = 0
    for _ in range(software_averages):
        for duration in qd_pulse_duration_range:
            qd_pulse.duration = duration
            ro_pulse.start = duration
            for gain in qd_pulse_gain_range:
                platform.qd_port[qubit].gain = gain
                if count % points == 0 and count > 0:
                    yield data
                msr, phase, i, q = platform.execute_pulse_sequence(sequence)[
                    ro_pulse.serial
                ]
                results = {
                    "MSR[V]": msr,
                    "i[V]": i,
                    "q[V]": q,
                    "phase[rad]": phase,
                    "duration[ns]": duration,
                    "gain[dimensionless]": gain,
                }
                data.add(results)
                count += 1

    yield data


@plot("MSR vs length and amplitude", plots.duration_amplitude_msr_phase)
def rabi_pulse_length_and_amplitude(
    platform,
    qubit,
    pulse_duration_start,
    pulse_duration_end,
    pulse_duration_step,
    pulse_amplitude_start,
    pulse_amplitude_end,
    pulse_amplitude_step,
    software_averages,
    points=10,
):
    platform.reload_settings()
count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse 
= platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"gain[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_gain\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Amplitude\", plots.amplitude_msr_phase)\ndef rabi_pulse_amplitude(\n platform,\n qubit,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom 
qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)","type":"common"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom 
qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"gain[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_gain\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Amplitude\", 
plots.amplitude_msr_phase)\ndef rabi_pulse_amplitude(\n platform,\n qubit,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR vs length and amplitude\", plots.duration_amplitude_msr_phase)\ndef rabi_pulse_length_and_amplitude(\n 
platform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(\n name=f\"data_q{qubit}\",\n quantities={\"duration\": \"ns\", \"amplitude\": \"dimensionless\"},\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np","type":"non_informative"},{"content":"# -*- coding: utf-8 -*-\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qcvv import plots\nfrom qcvv.data import Dataset\nfrom qcvv.decorators import plot\nfrom qcvv.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"rabi_oscillations_pi_pulse_max_voltage\",\n \"t1\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n 
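    # The remainder of this sweep is a reconstructed sketch, not verbatim
    # source: it mirrors rabi_pulse_length_and_gain above, sweeping pulse
    # amplitude instead of drive-port gain.
    # FIXME: Waiting to be able to pass qpucard to qibolab
    platform.ro_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["resonator_freq"]
        - ro_pulse.frequency
    )
    platform.qd_port[qubit].lo_frequency = (
        platform.characterization["single_qubit"][qubit]["qubit_freq"]
        - qd_pulse.frequency
    )

    count = 0
    for _ in range(software_averages):
        for duration in qd_pulse_duration_range:
            qd_pulse.duration = duration
            ro_pulse.start = duration
            for amplitude in qd_pulse_amplitude_range:
                qd_pulse.amplitude = amplitude
                if count % points == 0 and count > 0:
                    yield data
                msr, phase, i, q = platform.execute_pulse_sequence(sequence)[
                    ro_pulse.serial
                ]
                results = {
                    "MSR[V]": msr,
                    "i[V]": i,
                    "q[V]": q,
                    "phase[rad]": phase,
                    "duration[ns]": duration,
                    "amplitude[dimensionless]": amplitude,
                }
                data.add(results)
                count += 1

    yield data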
y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"duration[ns]\\\": duration,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n\\n yield data\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n 
pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - 
qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = 
platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n 
sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield 
data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n 
platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"duration[ns]\\\": duration,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and amplitude\\\", plots.duration_amplitude_msr_phase)\\ndef rabi_pulse_length_and_amplitude(\\n platform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import 
Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n 
\\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and 
count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n 
qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = 
platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n 
platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to 
be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n 
)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, 
pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n 
\\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n 
\\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = 
platform.create_RX_pulse(qubit, start=0)\",\n \"type\": \"common\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: 
AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", 
plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"duration[ns]\\\": duration,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and amplitude\\\", plots.duration_amplitude_msr_phase)\\ndef rabi_pulse_length_and_amplitude(\\n platform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"duration\\\": \\\"ns\\\", \\\"amplitude\\\": \\\"dimensionless\\\"},\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data 
= Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n 
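    # Rabi amplitude sweep: the RX drive-pulse amplitude is scanned over
    # np.arange(pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step),
    # the sequence is executed for every value, and MSR/i/q/phase are stored in
    # the Dataset; every `points` acquisitions the routine yields the data and a
    # rabi_fit estimate (pi_pulse_amplitude, max voltage, t1) so partial results
    # can be consumed while the sweep is still running.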
platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"duration[ns]\\\": duration,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and amplitude\\\", 
plots.duration_amplitude_msr_phase)\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n 
nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to 
pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n 
)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in 
range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\\ndef rabi_pulse_amplitude(\\n platform,\\n qubit,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_amplitude_range = np.arange(\\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for amplitude in qd_pulse_amplitude_range:\\n qd_pulse.amplitude = amplitude\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"amplitude[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_amplitude\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n \\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"amplitude[dimensionless]\\\": amplitude,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\\ndef rabi_pulse_length_and_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\", quantities={\\\"duration\\\": \\\"ns\\\", \\\"gain\\\": \\\"dimensionless\\\"}\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n 
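    # The assignments below steer the readout and drive local oscillators so that
    # LO + pulse intermediate frequency matches the calibrated resonator_freq /
    # qubit_freq stored in the platform characterization, i.e.
    # lo_frequency = calibrated_freq - pulse.frequency (per the FIXME above, this
    # is done here only until the QPU card settings can be passed to qibolab).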
platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"duration[ns]\\\": duration,\\n \\\"gain[dimensionless]\\\": gain,\\n }\\n data.add(results)\\n count += 1\\n\\n yield data\\n\\n\\n@plot(\\\"MSR vs length and amplitude\\\", plots.duration_amplitude_msr_phase)\\ndef rabi_pulse_length_and_amplitude(\\n platform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n pulse_amplitude_start,\\n pulse_amplitude_end,\\n pulse_amplitude_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(\\n name=f\\\"data_q{qubit}\\\",\\n quantities={\\\"duration\\\": \\\"ns\\\", \\\"amplitude\\\": \\\"dimensionless\\\"},\\n )\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\",\n \"type\": \"random\"\n },\n {\n \"content\": \"# -*- coding: utf-8 -*-\\nimport numpy as np\\nfrom qibolab.platforms.abstract import AbstractPlatform\\nfrom qibolab.pulses import PulseSequence\\n\\nfrom qcvv import plots\\nfrom qcvv.data import Dataset\\nfrom qcvv.decorators import plot\\nfrom qcvv.fitting.methods import rabi_fit\\n\\n\\n@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\\ndef rabi_pulse_length(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_duration_start,\\n pulse_duration_end,\\n pulse_duration_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_duration_range = np.arange(\\n pulse_duration_start, pulse_duration_end, pulse_duration_step\\n )\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for duration in qd_pulse_duration_range:\\n qd_pulse.duration = duration\\n ro_pulse.start = duration\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"Time[ns]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_duration\\\",\\n \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\\n 
\\\"t1\\\",\\n ],\\n )\\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\\n ro_pulse.serial\\n ]\\n results = {\\n \\\"MSR[V]\\\": msr,\\n \\\"i[V]\\\": i,\\n \\\"q[V]\\\": q,\\n \\\"phase[rad]\\\": phase,\\n \\\"Time[ns]\\\": duration,\\n }\\n data.add(results)\\n count += 1\\n yield data\\n\\n\\n@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\\ndef rabi_pulse_gain(\\n platform: AbstractPlatform,\\n qubit,\\n pulse_gain_start,\\n pulse_gain_end,\\n pulse_gain_step,\\n software_averages,\\n points=10,\\n):\\n platform.reload_settings()\\n\\n data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\\n\\n sequence = PulseSequence()\\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\\n sequence.add(qd_pulse)\\n sequence.add(ro_pulse)\\n\\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\\n\\n # FIXME: Waiting to be able to pass qpucard to qibolab\\n platform.ro_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"resonator_freq\\\"]\\n - ro_pulse.frequency\\n )\\n platform.qd_port[qubit].lo_frequency = (\\n platform.characterization[\\\"single_qubit\\\"][qubit][\\\"qubit_freq\\\"]\\n - qd_pulse.frequency\\n )\\n\\n count = 0\\n for _ in range(software_averages):\\n for gain in qd_pulse_gain_range:\\n platform.qd_port[qubit].gain = gain\\n if count % points == 0 and count > 0:\\n yield data\\n yield rabi_fit(\\n data,\\n x=\\\"gain[dimensionless]\\\",\\n y=\\\"MSR[uV]\\\",\\n qubit=qubit,\\n nqubits=platform.settings[\\\"nqubits\\\"],\\n labels=[\\n \\\"pi_pulse_gain\\\",\",\n \"type\": \"random\"\n }\n]"},"gt":{"kind":"list like","value":["@plot(\"MSR vs Time\", plots.time_msr_phase)"," data.add(results)","@plot(\"MSR vs Amplitude\", plots.amplitude_msr_phase)","@plot(\"MSR vs length and amplitude\", plots.duration_amplitude_msr_phase)"," data = Dataset(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})"," sequence.add(qd_pulse)"," data = Dataset(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})"," sequence.add(ro_pulse)"," data = Dataset(","@plot(\"MSR vs Gain\", plots.gain_msr_phase)","@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)"," data = Dataset(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})"," ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)"," qd_pulse = platform.create_RX_pulse(qubit, start=0)"," ro_pulse.start = duration"," qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)"," ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)"," ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)"," )","","from qibolab.platforms.abstract import AbstractPlatform","def rabi_pulse_length_and_amplitude("," pulse_duration_step,"," - qd_pulse.frequency"," pulse_duration_start, pulse_duration_end, pulse_duration_step"," \"rabi_oscillations_pi_pulse_max_voltage\","],"string":"[\n \"@plot(\\\"MSR vs Time\\\", plots.time_msr_phase)\",\n \" data.add(results)\",\n \"@plot(\\\"MSR vs Amplitude\\\", plots.amplitude_msr_phase)\",\n \"@plot(\\\"MSR vs length and amplitude\\\", plots.duration_amplitude_msr_phase)\",\n \" data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"Time\\\": \\\"ns\\\"})\",\n \" sequence.add(qd_pulse)\",\n \" data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"amplitude\\\": \\\"dimensionless\\\"})\",\n \" 
sequence.add(ro_pulse)\",\n \" data = Dataset(\",\n \"@plot(\\\"MSR vs Gain\\\", plots.gain_msr_phase)\",\n \"@plot(\\\"MSR vs length and gain\\\", plots.duration_gain_msr_phase)\",\n \" data = Dataset(name=f\\\"data_q{qubit}\\\", quantities={\\\"gain\\\": \\\"dimensionless\\\"})\",\n \" ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\",\n \" qd_pulse = platform.create_RX_pulse(qubit, start=0)\",\n \" ro_pulse.start = duration\",\n \" qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\",\n \" ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\",\n \" ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\",\n \" )\",\n \"\",\n \"from qibolab.platforms.abstract import AbstractPlatform\",\n \"def rabi_pulse_length_and_amplitude(\",\n \" pulse_duration_step,\",\n \" - qd_pulse.frequency\",\n \" pulse_duration_start, pulse_duration_end, pulse_duration_step\",\n \" \\\"rabi_oscillations_pi_pulse_max_voltage\\\",\"\n]"},"metainfo_separator":{"kind":"string","value":" METASEP\n"}}},{"rowIdx":210,"cells":{"repo_id":{"kind":"number","value":16,"string":"16"},"repo_name":{"kind":"string","value":"qiboteam__qibocal"},"project_context":{"kind":"string","value":"qiboteam__qibocal METASEP\n\ndoc/source/conf.py METASEP\n# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\n\nfrom recommonmark.transform import AutoStructify\n\nsys.path.insert(0, os.path.abspath(\"..\"))\n\nimport qibocal\n\n# -- Project information -----------------------------------------------------\n\nproject = \"qibocal\"\ncopyright = \"2022, The Qibo team\"\nauthor = \"The Qibo team\"\n\n# The full version, including alpha/beta/rc tags\nrelease = qibocal.__version__\n\n\n# -- General configuration ---------------------------------------------------\n\n# https://stackoverflow.com/questions/56336234/build-fail-sphinx-error-contents-rst-not-found\nmaster_doc = \"index\"\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.doctest\",\n \"sphinx.ext.coverage\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.intersphinx\",\n \"recommonmark\",\n \"sphinx_markdown_tables\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# Markdown configuration\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\nsource_suffix = {\".rst\": \"restructuredtext\", \".txt\": \"markdown\", \".md\": \"markdown\"}\n\nautosectionlabel_prefix_document = True\n# Allow to embed rst syntax in markdown files.\nenable_eval_rst = True\n\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = []\n\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx_rtd_theme\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# -- Intersphinx -------------------------------------------------------------\n\nintersphinx_mapping = {\"python\": (\"https://docs.python.org/3\", None)}\n\n# -- Autodoc ------------------------------------------------------------------\n#\nautodoc_member_order = \"bysource\"\n\n# Adapted this from\n# https://github.com/readthedocs/recommonmark/blob/ddd56e7717e9745f11300059e4268e204138a6b1/docs/conf.py\n# app setup hook\ndef setup(app):\n app.add_config_value(\"recommonmark_config\", {\"enable_eval_rst\": True}, True)\n app.add_transform(AutoStructify)\n app.add_css_file(\"css/style.css\")\n\nserverscripts/qibocal-update-on-change.py METASEP\n#!/usr/bin/env python\nimport argparse\n\nimport curio\nimport inotify.adapters\nimport inotify.constants\nfrom curio import subprocess\n\n\nasync def main(folder, exe_args):\n i = inotify.adapters.Inotify()\n i.add_watch(folder)\n\n for event in i.event_gen(yield_nones=False):\n if event is not None:\n (header, _, _, _) = event\n if (\n (header.mask & inotify.constants.IN_CREATE)\n or (header.mask & inotify.constants.IN_DELETE)\n or (header.mask & inotify.constants.IN_MODIFY)\n ):\n await subprocess.run(exe_args)\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"folder\")\n parser.add_argument(\"exe_args\", nargs=\"+\")\n args = parser.parse_args()\n curio.run(main(args.folder, args.exe_args))\n\nserverscripts/qibocal-index-reports.py METASEP\n\"\"\"qibocal-index-reports.py\nGenerates a JSON index with reports information.\n\"\"\"\nimport json\nimport pathlib\nimport sys\nfrom collections import ChainMap\n\nimport yaml\n\nROOT = \"/home/users/qibocal/qibocal-reports\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\nOUT = \"/home/users/qibocal/qibocal-reports/index.json\"\nDEFAULTS = {\n \"title\": \"-\",\n \"date\": \"-\",\n \"platform\": \"-\",\n \"start-time\": \"-\",\n \"end-time\": \"-\",\n}\nREQUIRED_FILE_METADATA = {\"title\", \"date\", \"platform\", \"start-time\" \"end-time\"}\n\n\ndef meta_from_path(p):\n meta = ChainMap(DEFAULTS)\n yaml_meta = p 
/ \"meta.yml\"\n yaml_res = {}\n if yaml_meta.exists():\n with yaml_meta.open() as f:\n try:\n yaml_res = yaml.safe_load(f)\n except yaml.YAMLError as e:\n print(f\"Error processing {yaml_meta}: {e}\", file=sys.stderr)\n meta = meta.new_child(yaml_res)\n return meta\n\n\ndef register(p):\n path_meta = meta_from_path(p)\n title, date, platform, start_time, end_time = (\n path_meta[\"title\"],\n path_meta[\"date\"],\n path_meta[\"platform\"],\n path_meta[\"start-time\"],\n path_meta[\"end-time\"],\n )\n url = ROOT_URL + p.name\n titlelink = f'{title}'\n return (titlelink, date, platform, start_time, end_time)\n\n\ndef make_index():\n root_path = pathlib.Path(ROOT)\n data = []\n for p in root_path.iterdir():\n if p.is_dir():\n try:\n res = register(p)\n data.append(res)\n except:\n print(\"Error processing folder\", p, file=sys.stderr)\n raise\n\n with open(OUT, \"w\") as f:\n json.dump({\"data\": data}, f)\n\n\nif __name__ == \"__main__\":\n make_index()\n\nsrc/qibocal/web/server.py METASEP\nimport os\nimport pathlib\n\nimport yaml\nfrom flask import Flask, render_template\n\nfrom qibocal import __version__\nfrom qibocal.cli.builders import ReportBuilder\n\nserver = Flask(__name__)\n\n\n@server.route(\"/\")\n@server.route(\"/data/\")\ndef page(path=None):\n folders = [\n folder\n for folder in reversed(sorted(os.listdir(os.getcwd())))\n if os.path.isdir(folder) and \"meta.yml\" in os.listdir(folder)\n ]\n\n report = None\n if path is not None:\n try:\n report = ReportBuilder(path)\n except (FileNotFoundError, TypeError):\n pass\n\n return render_template(\n \"template.html\",\n version=__version__,\n folders=folders,\n report=report,\n )\n\nsrc/qibocal/web/report.py METASEP\nimport os\nimport pathlib\n\nfrom jinja2 import Environment, FileSystemLoader\n\nfrom qibocal import __version__\nfrom qibocal.cli.builders import ReportBuilder\n\n\ndef create_report(path):\n \"\"\"Creates an HTML report for the data in the given path.\"\"\"\n filepath = pathlib.Path(__file__)\n\n with open(os.path.join(filepath.with_name(\"static\"), \"styles.css\")) as file:\n css_styles = f\"\"\n\n report = ReportBuilder(path)\n env = Environment(loader=FileSystemLoader(filepath.with_name(\"templates\")))\n template = env.get_template(\"template.html\")\n\n html = template.render(\n is_static=True,\n css_styles=css_styles,\n version=__version__,\n report=report,\n )\n\n with open(os.path.join(path, \"index.html\"), \"w\") as file:\n file.write(html)\n\nsrc/qibocal/web/app.py METASEP\nimport os\n\nimport pandas as pd\nimport yaml\nfrom dash import Dash, Input, Output, dcc, html\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.web.server import server\n\nDataUnits() # dummy dataset call to suppress ``pint[V]`` error\n\napp = Dash(\n server=server,\n suppress_callback_exceptions=True,\n)\n\napp.layout = html.Div(\n [\n dcc.Location(id=\"url\", refresh=False),\n dcc.Graph(id=\"graph\", figure={}),\n dcc.Interval(\n id=\"interval\",\n # TODO: Perhaps the user should be allowed to change the refresh rate\n interval=1000,\n n_intervals=0,\n disabled=False,\n ),\n ]\n)\n\n\n@app.callback(\n Output(\"graph\", \"figure\"),\n Input(\"interval\", \"n_intervals\"),\n Input(\"graph\", \"figure\"),\n Input(\"url\", \"pathname\"),\n)\ndef get_graph(n, current_figure, url):\n method, folder, routine, qubit, format = url.split(os.sep)[2:]\n try:\n # data = DataUnits.load_data(folder, routine, format, \"precision_sweep\")\n # with open(f\"{folder}/platform.yml\", \"r\") as f:\n # nqubits = 
yaml.safe_load(f)[\"nqubits\"]\n # if len(data) > 2:\n # params, fit = resonator_spectroscopy_fit(folder, format, nqubits)\n # else:\n # params, fit = None, None\n # return getattr(plots.resonator_spectroscopy, method)(data, params, fit)\n\n # # FIXME: Temporarily hardcode the plotting method to test\n # # multiple routines with different names in one folder\n # # should be changed to:\n # # return getattr(getattr(plots, routine), method)(data)\n\n return getattr(plots, method)(folder, routine, qubit, format)\n except (FileNotFoundError, pd.errors.EmptyDataError):\n return current_figure\n\nsrc/qibocal/web/__init__.py METASEP\n\nsrc/qibocal/tests/test_data.py METASEP\n\"\"\"Some tests for the Data and DataUnits class\"\"\"\nimport numpy as np\nimport pytest\nfrom pint import DimensionalityError, UndefinedUnitError\n\nfrom qibocal.data import Data, DataUnits\n\n\ndef random_data_units(length, options=None):\n data = DataUnits(options=options)\n for l in range(length):\n msr, i, q, phase = np.random.rand(4)\n pulse_sequence_result = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n }\n add_options = {}\n if options is not None:\n for option in options:\n add_options[option] = str(l)\n data.add({**pulse_sequence_result, **add_options})\n\n return data\n\n\ndef random_data(length):\n data = Data()\n for i in range(length):\n data.add({\"int\": int(i), \"float\": float(i), \"string\": str(i), \"bool\": bool(i)})\n return data\n\n\ndef test_data_initialization():\n \"\"\"Test DataUnits constructor\"\"\"\n data = DataUnits()\n assert len(data.df.columns) == 4\n assert list(data.df.columns) == [ # pylint: disable=E1101\n \"MSR\",\n \"i\",\n \"q\",\n \"phase\",\n ]\n\n data1 = DataUnits(quantities={\"attenuation\": \"dB\"})\n assert len(data1.df.columns) == 5\n assert list(data1.df.columns) == [ # pylint: disable=E1101\n \"attenuation\",\n \"MSR\",\n \"i\",\n \"q\",\n \"phase\",\n ]\n\n data2 = DataUnits(quantities={\"attenuation\": \"dB\"}, options=[\"option1\"])\n assert len(data2.df.columns) == 6\n assert list(data2.df.columns) == [ # pylint: disable=E1101\n \"option1\",\n \"attenuation\",\n \"MSR\",\n \"i\",\n \"q\",\n \"phase\",\n ]\n\n\ndef test_data_units_units():\n \"\"\"Test units of measure in DataUnits\"\"\"\n data_units = DataUnits()\n assert data_units.df.MSR.values.units == \"volt\"\n\n data_units1 = DataUnits(quantities={\"frequency\": \"Hz\"})\n assert data_units1.df.frequency.values.units == \"hertz\"\n\n with pytest.raises(UndefinedUnitError):\n data_units2 = DataUnits(quantities={\"fake_unit\": \"fake\"})\n\n\ndef test_data_units_add():\n \"\"\"Test add method of DataUnits\"\"\"\n data_units = random_data_units(5)\n assert len(data_units) == 5\n\n data_units1 = DataUnits(quantities={\"attenuation\": \"dB\"})\n msr, i, q, phase, att = np.random.rand(len(data_units1.df.columns))\n data_units1.add(\n {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"attenuation[dB]\": att,\n }\n )\n assert len(data_units1) == 1\n\n data_units1.add(\n {\n \"MSR[V]\": 0,\n \"i[V]\": 0.0,\n \"q[V]\": 0.0,\n \"phase[deg]\": 0,\n \"attenuation[dB]\": 1,\n }\n )\n assert len(data_units1) == 2\n\n data_units2 = DataUnits()\n msr, i, q, phase = np.random.rand(len(data_units2.df.columns))\n with pytest.raises(DimensionalityError):\n data_units2.add({\"MSR[dB]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n with pytest.raises(UndefinedUnitError):\n data_units2.add({\"MSR[test]\": msr, \"i[V]\": i, \"q[V]\": q, \"phase[deg]\": phase})\n\n 
data_units3 = random_data_units(10, options=[\"test\"])\n assert len(data_units3) == 10\n\n\ndef test_data_add():\n \"\"\"Test add method of Data class\"\"\"\n data = random_data(5)\n assert len(data) == 5\n data.add({\"int\": 123, \"float\": 123.456, \"string\": \"123\", \"bool\": True})\n assert len(data) == 6\n\n\ndef test_data_units_load_data_from_dict():\n \"\"\"Test set method of DataUnits class\"\"\"\n data_units = DataUnits()\n test = {\n \"MSR[V]\": [1, 2, 3],\n \"i[V]\": [3.0, 4.0, 5.0],\n \"q[V]\": np.array([3, 4, 5]),\n \"phase[deg]\": [6.0, 7.0, 8.0],\n }\n data_units.load_data_from_dict(test)\n assert len(data_units) == 3\n assert (data_units.get_values(\"MSR\", \"V\") == [1, 2, 3]).all()\n assert (data_units.get_values(\"i\", \"V\") == [3.0, 4.0, 5.0]).all()\n assert (data_units.get_values(\"q\", \"V\") == [3, 4, 5]).all()\n assert (data_units.get_values(\"phase\", \"deg\") == [6.0, 7.0, 8.0]).all()\n\n data_units1 = DataUnits(options=[\"option1\", \"option2\"])\n test = {\"option1\": [\"one\", \"two\", \"three\"], \"option2\": [1, 2, 3]}\n data_units1.load_data_from_dict(test)\n assert len(data_units1) == 3\n assert (data_units1.get_values(\"option1\") == [\"one\", \"two\", \"three\"]).all()\n assert (data_units1.get_values(\"option2\") == [1, 2, 3]).all()\n\n\ndef test_data_load_data_from_dict():\n \"\"\"Test set method of Data class\"\"\"\n data = random_data(5)\n test = {\n \"int\": [1, 2, 3],\n \"float\": [3.0, 4.0, 5.0],\n \"string\": [\"one\", \"two\", \"three\"],\n \"bool\": [True, False, True],\n }\n data.load_data_from_dict(test)\n assert len(data) == 3\n assert (data.get_values(\"int\") == [1, 2, 3]).all()\n assert (data.get_values(\"float\") == [3.0, 4.0, 5.0]).all()\n assert (data.get_values(\"string\") == [\"one\", \"two\", \"three\"]).all()\n assert (data.get_values(\"bool\") == [True, False, True]).all()\n\n\ndef test_get_values_data_units():\n \"\"\"Test get_values method of DataUnits class\"\"\"\n data_units = random_data_units(5, options=[\"option\"])\n\n assert (data_units.get_values(\"option\") == data_units.df[\"option\"]).all()\n assert (\n data_units.get_values(\"MSR\", \"uV\")\n == data_units.df[\"MSR\"].pint.to(\"uV\").pint.magnitude\n ).all()\n\n\ndef test_get_values_data():\n \"\"\"Test get_values method of Data class\"\"\"\n data = random_data(5)\n assert (data.get_values(\"int\") == data.df[\"int\"]).all()\n\nsrc/qibocal/plots/t1.py METASEP\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import exp\n\n\n# T1\ndef t1_time_msr_phase(folder, routine, qubit, format):\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(quantities={\"Time\": \"ns\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = DataUnits()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"T1\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"T1\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"Time\", \"ns\")),\n 
max(data.get_values(\"Time\", \"ns\")),\n 2 * len(data),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=exp(\n timerange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Time (ns)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\nsrc/qibocal/plots/spectroscopies.py METASEP\nimport os\n\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import lorenzian\n\n\ndef frequency_msr_phase__fast_precision(folder, routine, qubit, format):\n try:\n data_fast = DataUnits.load_data(folder, routine, format, f\"fast_sweep_q{qubit}\")\n except:\n data_fast = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_precision = DataUnits.load_data(\n folder, routine, format, f\"precision_sweep_q{qubit}\"\n )\n except:\n data_precision = DataUnits(quantities={\"frequency\": \"Hz\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"MSR\", \"uV\"),\n name=\"Fast\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_fast.get_values(\"frequency\", \"GHz\"),\n y=data_fast.get_values(\"phase\", \"rad\"),\n name=\"Fast\",\n ),\n row=1,\n col=2,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"MSR\", \"uV\"),\n name=\"Precision\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_precision.get_values(\"frequency\", \"GHz\"),\n y=data_precision.get_values(\"phase\", \"rad\"),\n name=\"Precision\",\n ),\n row=1,\n col=2,\n )\n if len(data_fast) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_fast.get_values(\"frequency\", \"GHz\")),\n max(data_fast.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_fast),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", 
size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase__cut(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n plot1d_attenuation = 30 # attenuation value to use for 1D frequency vs MSR plot\n\n fig = go.Figure()\n # index data on a specific attenuation value\n smalldf = data.df[data.get_values(\"attenuation\", \"dB\") == plot1d_attenuation].copy()\n # split multiple software averages to different datasets\n datasets = []\n while len(smalldf):\n datasets.append(smalldf.drop_duplicates(\"frequency\"))\n smalldf.drop(datasets[-1].index, inplace=True)\n fig.add_trace(\n go.Scatter(\n x=datasets[-1][\"frequency\"].pint.to(\"GHz\").pint.magnitude,\n y=datasets[-1][\"MSR\"].pint.to(\"V\").pint.magnitude,\n ),\n )\n\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting,\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (V)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Current (A)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Current (A)\",\n )\n return fig\n\n\ndef frequency_flux_msr_phase__matrix(folder, routine, qubit, format):\n fluxes = []\n for i in range(25): # FIXME: 25 is hardcoded\n file = f\"{folder}/data/{routine}/data_q{qubit}_f{i}.csv\"\n if os.path.exists(file):\n fluxes += [i]\n\n if len(fluxes) < 1:\n nb = 1\n else:\n nb = len(fluxes)\n fig = make_subplots(\n rows=2,\n cols=nb,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n x_title=\"Frequency (Hz)\",\n y_title=\"Current (A)\",\n shared_xaxes=True,\n shared_yaxes=True,\n )\n\n for j in fluxes:\n if j == fluxes[-1]:\n showscale = True\n else:\n showscale = False\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}_f{j}\")\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"MSR\", \"V\"),\n showscale=showscale,\n ),\n row=1,\n col=j,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"current\", \"A\"),\n z=data.get_values(\"phase\", \"rad\"),\n showscale=showscale,\n ),\n row=2,\n col=j,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows 
zooming while live plotting\n )\n return fig\n\n\ndef frequency_attenuation_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"frequency\", \"GHz\"),\n y=data.get_values(\"attenuation\", \"dB\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"Attenuation (dB)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Attenuation (dB)\",\n )\n return fig\n\n\ndef dispersive_frequency_msr_phase(folder, routine, qubit, formato):\n\n try:\n data_spec = DataUnits.load_data(folder, routine, formato, f\"data_q{qubit}\")\n except:\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n\n try:\n data_shifted = DataUnits.load_data(\n folder, routine, formato, f\"data_shifted_q{qubit}\"\n )\n except:\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n\n try:\n data_fit = Data.load_data(folder, routine, formato, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n try:\n data_fit_shifted = Data.load_data(\n folder, routine, formato, f\"fit_shifted_q{qubit}\"\n )\n except:\n data_fit_shifted = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"MSR\", \"uV\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data_spec.get_values(\"frequency\", \"GHz\"),\n y=data_spec.get_values(\"phase\", \"rad\"),\n name=\"Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"MSR\", \"uV\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=1,\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_shifted.get_values(\"frequency\", \"GHz\"),\n y=data_shifted.get_values(\"phase\", \"rad\"),\n name=\"Shifted Spectroscopy\",\n ),\n row=1,\n col=2,\n )\n\n # fitting traces\n if len(data_spec) > 0 and len(data_fit) > 0:\n freqrange = np.linspace(\n min(data_spec.get_values(\"frequency\", \"GHz\")),\n max(data_spec.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_spec),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n 
x=0,\n y=-0.25,\n showarrow=False,\n text=f\"The estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # fitting shifted traces\n if len(data_shifted) > 0 and len(data_fit_shifted) > 0:\n freqrange = np.linspace(\n min(data_shifted.get_values(\"frequency\", \"GHz\")),\n max(data_shifted.get_values(\"frequency\", \"GHz\")),\n 2 * len(data_shifted),\n )\n params = [i for i in list(data_fit_shifted.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=freqrange,\n y=lorenzian(\n freqrange,\n data_fit_shifted.get_values(\"popt0\"),\n data_fit_shifted.get_values(\"popt1\"),\n data_fit_shifted.get_values(\"popt2\"),\n data_fit_shifted.get_values(\"popt3\"),\n ),\n name=\"Fit shifted spectroscopy\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"The estimated shifted {params[0]} is {data_fit_shifted.df[params[0]][0]:.1f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Frequency (GHz)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Frequency (GHz)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\nsrc/qibocal/plots/ramsey.py METASEP\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import ramsey\n\n\n# For Ramsey oscillations\ndef time_msr(folder, routine, qubit, format):\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"}\n )\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = DataUnits()\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\"MSR (V)\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"wait\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Ramsey\",\n ),\n row=1,\n col=1,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"wait\", \"ns\")),\n max(data.get_values(\"wait\", \"ns\")),\n 2 * len(data),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=ramsey(\n timerange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n data_fit.get_values(\"popt4\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} Hz.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} ns\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated 
{params[2]} is {data_fit.df[params[2]][0]:.3f} Hz\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\nsrc/qibocal/plots/rabi.py METASEP\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import rabi\n\n\n# For Rabi oscillations\ndef time_msr_phase(folder, routine, qubit, format):\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(quantities={\"Time\": \"ns\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"Time\", \"ns\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n timerange = np.linspace(\n min(data.get_values(\"Time\", \"ns\")),\n max(data.get_values(\"Time\", \"ns\")),\n 2 * len(data),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=timerange,\n y=rabi(\n timerange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n data_fit.get_values(\"popt4\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f} ns.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.1f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Time (ns)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Time (ns)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\n\ndef gain_msr_phase(folder, routine, qubit, format):\n\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(quantities={\"gain\", \"dimensionless\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = Data(\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n \"label1\",\n \"label2\",\n ]\n )\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gain\", \"dimensionless\"),\n 
y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gain\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n gainrange = np.linspace(\n min(data.get_values(\"gain\", \"dimensionless\")),\n max(data.get_values(\"gain\", \"dimensionless\")),\n 2 * len(data),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=gainrange,\n y=rabi(\n gainrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n data_fit.get_values(\"popt4\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.4f} uV\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gain (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\ndef amplitude_msr_phase(folder, routine, qubit, format):\n\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(quantities={\"amplitude\", \"dimensionless\"})\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = DataUnits()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"amplitude\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"amplitude\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Rabi Oscillations\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n amplituderange = np.linspace(\n min(data.get_values(\"amplitude\", \"dimensionless\")),\n max(data.get_values(\"amplitude\", \"dimensionless\")),\n 2 * len(data),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=amplituderange,\n y=rabi(\n amplituderange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n data_fit.get_values(\"popt4\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.3f} uV.\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.4f}\",\n 
textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Amplitude (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n )\n return fig\n\n\ndef duration_gain_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"gain\", \"dimensionless\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"gain\", \"dimensionless\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"duration (ns)\",\n yaxis_title=\"gain (dimensionless)\",\n xaxis2_title=\"duration (ns)\",\n yaxis2_title=\"gain (dimensionless)\",\n )\n return fig\n\n\ndef duration_amplitude_msr_phase(folder, routine, qubit, format):\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"amplitude\", \"dimensionless\"),\n z=data.get_values(\"MSR\", \"V\"),\n colorbar_x=0.45,\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Heatmap(\n x=data.get_values(\"duration\", \"ns\"),\n y=data.get_values(\"amplitude\", \"dimensionless\"),\n z=data.get_values(\"phase\", \"rad\"),\n colorbar_x=1.0,\n ),\n row=1,\n col=2,\n )\n fig.update_layout(\n showlegend=False,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"duration (ns)\",\n yaxis_title=\"amplitude (dimensionless)\",\n xaxis2_title=\"duration (ns)\",\n yaxis2_title=\"amplitude (dimensionless)\",\n )\n return fig\n\nsrc/qibocal/plots/flipping.py METASEP\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import flipping\n\n\n# Flipping\ndef flips_msr_phase(folder, routine, qubit, format):\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(quantities={\"flips\": \"dimensionless\"})\n\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = DataUnits()\n\n fig = make_subplots(\n rows=1,\n cols=2,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\n \"MSR (V)\",\n \"phase (rad)\",\n ),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"flips\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n name=\"Flipping MSR\",\n ),\n row=1,\n col=1,\n )\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"flips\", \"dimensionless\"),\n y=data.get_values(\"phase\", \"rad\"),\n name=\"Flipping Phase\",\n ),\n row=1,\n col=2,\n )\n\n # add fitting trace\n if len(data) > 0 and len(data_fit) > 0:\n flipsrange = np.linspace(\n min(data.get_values(\"flips\", \"dimensionless\")),\n max(data.get_values(\"flips\", 
\"dimensionless\")),\n 2 * len(data),\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=flipsrange,\n y=flipping(\n flipsrange,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.25,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.4f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.30,\n showarrow=False,\n text=f\"Estimated {params[1]} is {data_fit.df[params[1]][0]:.3f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n # last part\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Flips (dimensionless)\",\n yaxis_title=\"MSR (uV)\",\n xaxis2_title=\"Flips (dimensionless)\",\n yaxis2_title=\"Phase (rad)\",\n )\n return fig\n\nsrc/qibocal/plots/calibrate_qubit_states.py METASEP\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import DataUnits\n\n\n# For calibrate qubit states\ndef exc_gnd(folder, routine, qubit, format):\n\n try:\n data_exc = DataUnits.load_data(folder, routine, format, f\"data_exc_q{qubit}\")\n except:\n data_exc = DataUnits(quantities={\"iteration\": \"dimensionless\"})\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(\"Calibrate qubit states\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data_exc.get_values(\"i\", \"V\"),\n y=data_exc.get_values(\"q\", \"V\"),\n name=\"exc_state\",\n mode=\"markers\",\n marker=dict(size=3, color=\"lightcoral\"),\n ),\n row=1,\n col=1,\n )\n\n try:\n data_gnd = DataUnits.load_data(folder, routine, format, f\"data_gnd_q{qubit}\")\n except:\n data_gnd = DataUnits(quantities={\"iteration\": \"dimensionless\"})\n\n fig.add_trace(\n go.Scatter(\n x=data_gnd.get_values(\"i\", \"V\"),\n y=data_gnd.get_values(\"q\", \"V\"),\n name=\"gnd state\",\n mode=\"markers\",\n marker=dict(size=3, color=\"skyblue\"),\n ),\n row=1,\n col=1,\n )\n\n i_exc = data_exc.get_values(\"i\", \"V\")\n q_exc = data_exc.get_values(\"q\", \"V\")\n\n i_mean_exc = i_exc.mean()\n q_mean_exc = q_exc.mean()\n iq_mean_exc = complex(i_mean_exc, q_mean_exc)\n mod_iq_exc = abs(iq_mean_exc) * 1e6\n\n fig.add_trace(\n go.Scatter(\n x=[i_mean_exc],\n y=[q_mean_exc],\n name=f\" state1_voltage: {mod_iq_exc}
mean_state1: {iq_mean_exc}\",\n mode=\"markers\",\n marker=dict(size=10, color=\"red\"),\n ),\n row=1,\n col=1,\n )\n\n i_gnd = data_gnd.get_values(\"i\", \"V\")\n q_gnd = data_gnd.get_values(\"q\", \"V\")\n\n i_mean_gnd = i_gnd.mean()\n q_mean_gnd = q_gnd.mean()\n iq_mean_gnd = complex(i_mean_gnd, q_mean_gnd)\n mod_iq_gnd = abs(iq_mean_gnd) * 1e6\n\n fig.add_trace(\n go.Scatter(\n x=[i_mean_gnd],\n y=[q_mean_gnd],\n name=f\" state0_voltage: {mod_iq_gnd}
mean_state0: {iq_mean_gnd}\",\n mode=\"markers\",\n marker=dict(size=10, color=\"blue\"),\n ),\n row=1,\n col=1,\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"i (V)\",\n yaxis_title=\"q (V)\",\n width=1000,\n )\n\n return fig\n\nsrc/qibocal/plots/allXY.py METASEP\nimport numpy as np\nimport plotly.graph_objects as go\nfrom plotly.subplots import make_subplots\n\nfrom qibocal.data import Data, DataUnits\nfrom qibocal.fitting.utils import cos\n\n\n# allXY\ndef prob_gate(folder, routine, qubit, format):\n\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"}\n )\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(f\"allXY\",),\n )\n\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"gateNumber\", \"dimensionless\"),\n y=data.get_values(\"probability\", \"dimensionless\"),\n mode=\"markers\",\n name=\"Probabilities\",\n ),\n row=1,\n col=1,\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gate sequence number\",\n yaxis_title=\"Z projection probability of qubit state |o>\",\n )\n return fig\n\n\n# allXY\ndef prob_gate_iteration(folder, routine, qubit, format):\n\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n }\n )\n\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.1,\n vertical_spacing=0.1,\n subplot_titles=(f\"allXY\",),\n )\n\n gates = len(data.get_values(\"gateNumber\", \"dimensionless\"))\n # print(gates)\n import numpy as np\n\n for n in range(gates // 21):\n data_start = n * 21\n data_end = data_start + 21\n beta_param = np.array(data.get_values(\"beta_param\", \"dimensionless\"))[\n data_start\n ]\n gates = np.array(data.get_values(\"gateNumber\", \"dimensionless\"))[\n data_start:data_end\n ]\n probabilities = np.array(data.get_values(\"probability\", \"dimensionless\"))[\n data_start:data_end\n ]\n c = \"#\" + \"{:06x}\".format(n * 99999)\n fig.add_trace(\n go.Scatter(\n x=gates,\n y=probabilities,\n mode=\"markers+lines\",\n line=dict(color=c),\n name=f\"beta_parameter = {beta_param}\",\n marker_size=16,\n ),\n row=1,\n col=1,\n )\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Gate sequence number\",\n yaxis_title=\"Z projection probability of qubit state |o>\",\n )\n return fig\n\n\n# beta param tuning\ndef msr_beta(folder, routine, qubit, format):\n\n try:\n data = DataUnits.load_data(folder, routine, format, f\"data_q{qubit}\")\n except:\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"}\n )\n try:\n data_fit = Data.load_data(folder, routine, format, f\"fit_q{qubit}\")\n except:\n data_fit = DataUnits()\n\n fig = make_subplots(\n rows=1,\n cols=1,\n horizontal_spacing=0.01,\n vertical_spacing=0.01,\n subplot_titles=(f\"beta_param_tuning\",),\n )\n\n c = \"#6597aa\"\n fig.add_trace(\n go.Scatter(\n x=data.get_values(\"beta_param\", \"dimensionless\"),\n y=data.get_values(\"MSR\", \"uV\"),\n line=dict(color=c),\n 
mode=\"markers\",\n name=\"[Rx(pi/2) - Ry(pi)] - [Ry(pi/2) - Rx(pi)]\",\n ),\n row=1,\n col=1,\n )\n # add fitting traces\n if len(data) > 0 and len(data_fit) > 0:\n beta_param = np.linspace(\n min(data.get_values(\"beta_param\", \"dimensionless\")),\n max(data.get_values(\"beta_param\", \"dimensionless\")),\n 20,\n )\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n fig.add_trace(\n go.Scatter(\n x=beta_param,\n y=cos(\n beta_param,\n data_fit.get_values(\"popt0\"),\n data_fit.get_values(\"popt1\"),\n data_fit.get_values(\"popt2\"),\n data_fit.get_values(\"popt3\"),\n ),\n name=\"Fit\",\n line=go.scatter.Line(dash=\"dot\"),\n ),\n row=1,\n col=1,\n )\n\n fig.add_annotation(\n dict(\n font=dict(color=\"black\", size=12),\n x=0,\n y=-0.20,\n showarrow=False,\n text=f\"Estimated {params[0]} is {data_fit.df[params[0]][0]:.4f}\",\n textangle=0,\n xanchor=\"left\",\n xref=\"paper\",\n yref=\"paper\",\n )\n )\n\n fig.update_layout(\n showlegend=True,\n uirevision=\"0\", # ``uirevision`` allows zooming while live plotting\n xaxis_title=\"Beta parameter\",\n yaxis_title=\"MSR[uV]\",\n )\n return fig\n\nsrc/qibocal/plots/__init__.py METASEP\nfrom qibocal.plots.allXY import *\nfrom qibocal.plots.calibrate_qubit_states import *\nfrom qibocal.plots.flipping import *\nfrom qibocal.plots.rabi import *\nfrom qibocal.plots.ramsey import *\nfrom qibocal.plots.spectroscopies import *\nfrom qibocal.plots.t1 import *\n\nsrc/qibocal/fitting/utils.py METASEP\nimport re\n\nimport numpy as np\n\n\ndef lorenzian(frequency, amplitude, center, sigma, offset):\n # http://openafox.com/science/peak-function-derivations.html\n return (amplitude / np.pi) * (\n sigma / ((frequency - center) ** 2 + sigma**2)\n ) + offset\n\n\ndef rabi(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # Period T : 1/p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef ramsey(x, p0, p1, p2, p3, p4):\n # A fit to Superconducting Qubit Rabi Oscillation\n # Offset : p[0]\n # Oscillation amplitude : p[1]\n # DeltaFreq : p[2]\n # Phase : p[3]\n # Arbitrary parameter T_2 : 1/p[4]\n return p0 + p1 * np.sin(2 * np.pi * x * p2 + p3) * np.exp(-x * p4)\n\n\ndef exp(x, *p):\n return p[0] - p[1] * np.exp(-1 * x * p[2])\n\n\ndef flipping(x, p0, p1, p2, p3):\n # A fit to Flipping Qubit oscillation\n # Epsilon?? 
shoule be Amplitude : p[0]\n # Offset : p[1]\n # Period of oscillation : p[2]\n # phase for the first point corresponding to pi/2 rotation : p[3]\n return np.sin(x * 2 * np.pi / p2 + p3) * p0 + p1\n\n\ndef cos(x, p0, p1, p2, p3):\n # Offset : p[0]\n # Amplitude : p[1]\n # Period : p[2]\n # Phase : p[3]\n return p0 + p1 * np.cos(2 * np.pi * x / p2 + p3)\n\n\ndef parse(key):\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n return name, unit\n\nsrc/qibocal/fitting/methods.py METASEP\n\"\"\"Routine-specific method for post-processing data acquired.\"\"\"\nimport lmfit\nimport numpy as np\nfrom scipy.optimize import curve_fit\n\nfrom qibocal.config import log\nfrom qibocal.data import Data\nfrom qibocal.fitting.utils import cos, exp, flipping, lorenzian, parse, rabi, ramsey\n\n\ndef lorentzian_fit(data, x, y, qubit, nqubits, labels, fit_file_name=None):\n \"\"\"Fitting routine for resonator spectroscopy\"\"\"\n if fit_file_name == None:\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[1],\n labels[0],\n ],\n )\n else:\n data_fit = Data(\n name=fit_file_name + f\"_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[1],\n labels[0],\n ],\n )\n\n frequencies = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n # Create a lmfit model for fitting equation defined in resonator_peak\n model_Q = lmfit.Model(lorenzian)\n\n # Guess parameters for Lorentzian max or min\n if (nqubits == 1 and labels[0] == \"resonator_freq\") or (\n nqubits != 1 and labels[0] == \"qubit_freq\"\n ):\n guess_center = frequencies[\n np.argmax(voltages)\n ] # Argmax = Returns the indices of the maximum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmin(voltages)] - guess_center)\n guess_amp = (np.max(voltages) - guess_offset) * guess_sigma * np.pi\n\n else:\n guess_center = frequencies[\n np.argmin(voltages)\n ] # Argmin = Returns the indices of the minimum values along an axis.\n guess_offset = np.mean(\n voltages[np.abs(voltages - np.mean(voltages) < np.std(voltages))]\n )\n guess_sigma = abs(frequencies[np.argmax(voltages)] - guess_center)\n guess_amp = (np.min(voltages) - guess_offset) * guess_sigma * np.pi\n\n # Add guessed parameters to the model\n model_Q.set_param_hint(\"center\", value=guess_center, vary=True)\n model_Q.set_param_hint(\"sigma\", value=guess_sigma, vary=True)\n model_Q.set_param_hint(\"amplitude\", value=guess_amp, vary=True)\n model_Q.set_param_hint(\"offset\", value=guess_offset, vary=True)\n guess_parameters = model_Q.make_params()\n\n # fit the model with the data and guessed parameters\n try:\n fit_res = model_Q.fit(\n data=voltages, frequency=frequencies, params=guess_parameters\n )\n except:\n log.warning(\"The fitting was not successful\")\n return data_fit\n\n # get the values for postprocessing and for legend.\n f0 = fit_res.best_values[\"center\"]\n BW = fit_res.best_values[\"sigma\"] * 2\n Q = abs(f0 / BW)\n peak_voltage = (\n fit_res.best_values[\"amplitude\"] / (fit_res.best_values[\"sigma\"] * np.pi)\n + fit_res.best_values[\"offset\"]\n )\n\n freq = f0 * 1e9\n\n data_fit.add(\n {\n labels[1]: peak_voltage,\n labels[0]: freq,\n \"popt0\": fit_res.best_values[\"amplitude\"],\n \"popt1\": fit_res.best_values[\"center\"],\n \"popt2\": fit_res.best_values[\"sigma\"],\n \"popt3\": fit_res.best_values[\"offset\"],\n 
}\n )\n return data_fit\n\n\ndef rabi_fit(data, x, y, qubit, nqubits, labels):\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n else:\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmax(voltages.values)],\n np.pi / 2,\n 0.1e-6,\n ]\n try:\n popt, pcov = curve_fit(\n rabi, time.values, voltages.values, p0=pguess, maxfev=10000\n )\n smooth_dataset = rabi(time.values, *popt)\n pi_pulse_duration = np.abs((1.0 / popt[2]) / 2)\n pi_pulse_max_voltage = smooth_dataset.max()\n t2 = 1.0 / popt[4] # double check T1\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: pi_pulse_duration,\n labels[1]: pi_pulse_max_voltage,\n }\n )\n return data_fit\n\n\ndef ramsey_fit(data, x, y, qubit, qubit_freq, sampling_rate, offset_freq, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n \"popt4\",\n labels[0],\n labels[1],\n labels[2],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n np.mean(voltages.values),\n np.max(voltages.values) - np.min(voltages.values),\n 0.5 / time.values[np.argmin(voltages.values)],\n np.pi / 2,\n 500e-9,\n ]\n\n try:\n popt, pcov = curve_fit(\n ramsey, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n delta_fitting = popt[2]\n delta_phys = int((delta_fitting * sampling_rate) - offset_freq)\n corrected_qubit_frequency = int(qubit_freq + delta_phys)\n t2 = 1.0 / popt[4]\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n \"popt4\": popt[4],\n labels[0]: delta_phys,\n labels[1]: corrected_qubit_frequency,\n labels[2]: t2,\n }\n )\n return data_fit\n\n\ndef t1_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n labels[0],\n ],\n )\n\n time = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [\n max(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n else:\n pguess = [\n min(voltages.values),\n (max(voltages.values) - min(voltages.values)),\n 1 / 250,\n ]\n\n try:\n popt, pcov = curve_fit(\n exp, time.values, voltages.values, p0=pguess, maxfev=2000000\n )\n t1 = abs(1 / popt[2])\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n labels[0]: t1,\n }\n )\n return data_fit\n\n\ndef flipping_fit(data, x, y, qubit, nqubits, niter, pi_pulse_amplitude, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n labels[1],\n ],\n )\n\n flips = data.get_values(*parse(x)) # Check X data stores. 
N flips or i?\n voltages = data.get_values(*parse(y))\n\n if nqubits == 1:\n pguess = [0.0003, np.mean(voltages), -18, 0] # epsilon guess parameter\n else:\n pguess = [0.0003, np.mean(voltages), 18, 0] # epsilon guess parameter\n\n try:\n popt, pcov = curve_fit(flipping, flips, voltages, p0=pguess, maxfev=2000000)\n epsilon = -np.pi / popt[2]\n amplitude_delta = np.pi / (np.pi + epsilon)\n corrected_amplitude = amplitude_delta * pi_pulse_amplitude\n # angle = (niter * 2 * np.pi / popt[2] + popt[3]) / (1 + 4 * niter)\n # amplitude_delta = angle * 2 / np.pi * pi_pulse_amplitude\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: amplitude_delta,\n labels[1]: corrected_amplitude,\n }\n )\n return data_fit\n\n\ndef drag_tunning_fit(data, x, y, qubit, nqubits, labels):\n\n data_fit = Data(\n name=f\"fit_q{qubit}\",\n quantities=[\n \"popt0\",\n \"popt1\",\n \"popt2\",\n \"popt3\",\n labels[0],\n ],\n )\n\n beta_params = data.get_values(*parse(x))\n voltages = data.get_values(*parse(y))\n\n pguess = [\n 0, # Offset: p[0]\n beta_params.values[np.argmax(voltages)]\n - beta_params.values[np.argmin(voltages)], # Amplitude: p[1]\n 4, # Period: p[2]\n 0.3, # Phase: p[3]\n ]\n\n try:\n popt, pcov = curve_fit(cos, beta_params.values, voltages.values)\n smooth_dataset = cos(beta_params.values, popt[0], popt[1], popt[2], popt[3])\n beta_optimal = beta_params.values[np.argmin(smooth_dataset)]\n\n except:\n log.warning(\"The fitting was not succesful\")\n return data_fit\n\n data_fit.add(\n {\n \"popt0\": popt[0],\n \"popt1\": popt[1],\n \"popt2\": popt[2],\n \"popt3\": popt[3],\n labels[0]: beta_optimal,\n }\n )\n return data_fit\n\nsrc/qibocal/fitting/__init__.py METASEP\n\nsrc/qibocal/cli/builders.py METASEP\nimport datetime\nimport inspect\nimport os\nimport shutil\n\nimport yaml\n\nfrom qibocal import calibrations\nfrom qibocal.config import log, raise_error\nfrom qibocal.data import Data\n\n\ndef load_yaml(path):\n \"\"\"Load yaml file from disk.\"\"\"\n with open(path) as file:\n data = yaml.safe_load(file)\n return data\n\n\nclass ActionBuilder:\n \"\"\"Class for parsing and executing runcards.\n Args:\n runcard (path): path containing the runcard.\n folder (path): path for the output folder.\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n\n def __init__(self, runcard, folder=None, force=False):\n path, self.folder = self._generate_output_folder(folder, force)\n self.runcard = load_yaml(runcard)\n # Qibolab default backend if not provided in runcard.\n backend_name = self.runcard.get(\"backend\", \"qibolab\")\n platform_name = self.runcard.get(\"platform\", \"dummy\")\n self.backend, self.platform = self._allocate_backend(\n backend_name, platform_name, path\n )\n self.qubits = self.runcard[\"qubits\"]\n self.format = self.runcard[\"format\"]\n\n # Saving runcard\n shutil.copy(runcard, f\"{path}/runcard.yml\")\n self.save_meta(path, self.folder)\n\n @staticmethod\n def _generate_output_folder(folder, force):\n \"\"\"Static method for generating the output folder.\n Args:\n folder (path): path for the output folder. 
If None it will be created a folder automatically\n force (bool): option to overwrite the output folder if it exists already.\n \"\"\"\n if folder is None:\n import getpass\n\n e = datetime.datetime.now()\n user = getpass.getuser().replace(\".\", \"-\")\n date = e.strftime(\"%Y-%m-%d\")\n folder = f\"{date}-{'000'}-{user}\"\n num = 0\n while os.path.exists(folder):\n log.info(f\"Directory {folder} already exists.\")\n num += 1\n folder = f\"{date}-{str(num).rjust(3, '0')}-{user}\"\n log.info(f\"Trying to create directory {folder}\")\n elif os.path.exists(folder) and not force:\n raise_error(RuntimeError, f\"Directory {folder} already exists.\")\n elif os.path.exists(folder) and force:\n log.warning(f\"Deleting previous directory {folder}.\")\n shutil.rmtree(os.path.join(os.getcwd(), folder))\n\n path = os.path.join(os.getcwd(), folder)\n log.info(f\"Creating directory {folder}.\")\n os.makedirs(path)\n return path, folder\n\n def _allocate_backend(self, backend_name, platform_name, path):\n \"\"\"Allocate the platform using Qibolab.\"\"\"\n from qibo.backends import GlobalBackend, set_backend\n\n if backend_name == \"qibolab\":\n from qibolab.paths import qibolab_folder\n\n original_runcard = qibolab_folder / \"runcards\" / f\"{platform_name}.yml\"\n # copy of the original runcard that will stay unmodified\n shutil.copy(original_runcard, f\"{path}/platform.yml\")\n # copy of the original runcard that will be modified during calibration\n updated_runcard = f\"{self.folder}/new_platform.yml\"\n shutil.copy(original_runcard, updated_runcard)\n # allocate backend with updated_runcard\n set_backend(\n backend=backend_name, platform=platform_name, runcard=updated_runcard\n )\n backend = GlobalBackend()\n return backend, backend.platform\n else:\n set_backend(backend=backend_name, platform=platform_name)\n backend = GlobalBackend()\n return backend, None\n\n def save_meta(self, path, folder):\n import qibocal\n\n e = datetime.datetime.now(datetime.timezone.utc)\n meta = {}\n meta[\"title\"] = folder\n meta[\"backend\"] = str(self.backend)\n meta[\"platform\"] = str(self.backend.platform)\n meta[\"date\"] = e.strftime(\"%Y-%m-%d\")\n meta[\"start-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n meta[\"versions\"] = self.backend.versions # pylint: disable=E1101\n meta[\"versions\"][\"qibocal\"] = qibocal.__version__\n\n with open(f\"{path}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n def _build_single_action(self, name):\n \"\"\"Helper method to parse the actions in the runcard.\"\"\"\n f = getattr(calibrations, name)\n path = os.path.join(self.folder, f\"data/{name}/\")\n os.makedirs(path)\n sig = inspect.signature(f)\n params = self.runcard[\"actions\"][name]\n for param in list(sig.parameters)[2:-1]:\n if param not in params:\n raise_error(AttributeError, f\"Missing parameter {param} in runcard.\")\n if f.__annotations__[\"qubit\"] == int:\n single_qubit_action = True\n else:\n single_qubit_action = False\n\n return f, params, path, single_qubit_action\n\n def execute(self):\n \"\"\"Method to execute sequentially all the actions in the runcard.\"\"\"\n if self.platform is not None:\n self.platform.connect()\n self.platform.setup()\n self.platform.start()\n\n for action in self.runcard[\"actions\"]:\n routine, args, path, single_qubit_action = self._build_single_action(action)\n self._execute_single_action(routine, args, path, single_qubit_action)\n\n if self.platform is not None:\n self.platform.stop()\n self.platform.disconnect()\n\n def 
_execute_single_action(self, routine, arguments, path, single_qubit):\n \"\"\"Method to execute a single action and retrieving the results.\"\"\"\n if self.format is None:\n raise_error(ValueError, f\"Cannot store data using {self.format} format.\")\n if single_qubit:\n for qubit in self.qubits:\n results = routine(self.platform, qubit, **arguments)\n\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n if self.platform is not None:\n self.update_platform_runcard(qubit, routine.__name__)\n else:\n results = routine(self.platform, self.qubits, **arguments)\n\n for data in results:\n getattr(data, f\"to_{self.format}\")(path)\n\n if self.platform is not None:\n self.update_platform_runcard(qubit, routine.__name__)\n\n def update_platform_runcard(self, qubit, routine):\n\n try:\n data_fit = Data.load_data(\n self.folder, routine, self.format, f\"fit_q{qubit}\"\n )\n except:\n data_fit = Data()\n\n params = [i for i in list(data_fit.df.keys()) if \"popt\" not in i]\n settings = load_yaml(f\"{self.folder}/new_platform.yml\")\n\n for param in params:\n settings[\"characterization\"][\"single_qubit\"][qubit][param] = int(\n data_fit.get_values(param)\n )\n\n with open(f\"{self.folder}/new_platform.yml\", \"w\") as file:\n yaml.dump(\n settings, file, sort_keys=False, indent=4, default_flow_style=None\n )\n\n def dump_report(self):\n from qibocal.web.report import create_report\n\n # update end time\n meta = load_yaml(f\"{self.folder}/meta.yml\")\n e = datetime.datetime.now(datetime.timezone.utc)\n meta[\"end-time\"] = e.strftime(\"%H:%M:%S\")\n with open(f\"{self.folder}/meta.yml\", \"w\") as file:\n yaml.dump(meta, file)\n\n create_report(self.folder)\n\n\nclass ReportBuilder:\n \"\"\"Parses routines and plots to report and live plotting page.\n\n Args:\n path (str): Path to the data folder to generate report for.\n \"\"\"\n\n def __init__(self, path):\n self.path = path\n self.metadata = load_yaml(os.path.join(path, \"meta.yml\"))\n\n # find proper path title\n base, self.title = os.path.join(os.getcwd(), path), \"\"\n while self.title in (\"\", \".\"):\n base, self.title = os.path.split(base)\n\n self.runcard = load_yaml(os.path.join(path, \"runcard.yml\"))\n self.format = self.runcard.get(\"format\")\n self.qubits = self.runcard.get(\"qubits\")\n\n # create calibration routine objects\n # (could be incorporated to :meth:`qibocal.cli.builders.ActionBuilder._build_single_action`)\n self.routines = []\n for action in self.runcard.get(\"actions\"):\n if hasattr(calibrations, action):\n routine = getattr(calibrations, action)\n else:\n raise_error(ValueError, f\"Undefined action {action} in report.\")\n\n if not hasattr(routine, \"plots\"):\n routine.plots = []\n self.routines.append(routine)\n\n def get_routine_name(self, routine):\n \"\"\"Prettify routine's name for report headers.\"\"\"\n return routine.__name__.replace(\"_\", \" \").title()\n\n def get_figure(self, routine, method, qubit):\n \"\"\"Get html figure for report.\n\n Args:\n routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n import tempfile\n\n figure = method(self.path, routine.__name__, qubit, self.format)\n with tempfile.NamedTemporaryFile() as temp:\n figure.write_html(temp.name, include_plotlyjs=False, full_html=False)\n fightml = temp.read().decode(\"utf-8\")\n return fightml\n\n def get_live_figure(self, routine, method, qubit):\n \"\"\"Get url to dash page for live plotting.\n\n This url is used by :meth:`qibocal.web.app.get_graph`.\n\n Args:\n 
routine (Callable): Calibration method.\n method (Callable): Plot method.\n qubit (int): Qubit id.\n \"\"\"\n return os.path.join(\n method.__name__,\n self.path,\n routine.__name__,\n str(qubit),\n self.format,\n )\n\nsrc/qibocal/cli/_base.py METASEP\n\"\"\"Adds global CLI options.\"\"\"\nimport base64\nimport pathlib\nimport shutil\nimport socket\nimport subprocess\nimport uuid\nfrom urllib.parse import urljoin\n\nimport click\nfrom qibo.config import log, raise_error\n\nfrom qibocal.cli.builders import ActionBuilder\n\nCONTEXT_SETTINGS = dict(help_option_names=[\"-h\", \"--help\"])\n\n# options for report upload\nUPLOAD_HOST = (\n \"qibocal@localhost\"\n if socket.gethostname() == \"saadiyat\"\n else \"qibocal@login.qrccluster.com\"\n)\nTARGET_DIR = \"qibocal-reports/\"\nROOT_URL = \"http://login.qrccluster.com:9000/\"\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"runcard\", metavar=\"RUNCARD\", type=click.Path(exists=True))\n@click.option(\n \"folder\",\n \"-o\",\n type=click.Path(),\n help=\"Output folder. If not provided a standard name will generated.\",\n)\n@click.option(\n \"force\",\n \"-f\",\n is_flag=True,\n help=\"Use --force option to overwrite the output folder.\",\n)\ndef command(runcard, folder, force=None):\n\n \"\"\"qibocal: Quantum Calibration Verification and Validation using Qibo.\n\n Arguments:\n\n - RUNCARD: runcard with declarative inputs.\n \"\"\"\n\n builder = ActionBuilder(runcard, folder, force)\n builder.execute()\n builder.dump_report()\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.option(\n \"port\",\n \"-p\",\n \"--port\",\n default=8050,\n type=int,\n help=\"Localhost port to launch dash server.\",\n)\n@click.option(\n \"debug\",\n \"-d\",\n \"--debug\",\n is_flag=True,\n help=\"Launch server in debugging mode.\",\n)\ndef live_plot(port, debug):\n \"\"\"Real time plotting of calibration data on a dash server.\"\"\"\n import socket\n\n from qibocal.web.app import app\n\n # change port if it is already used\n while True:\n with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:\n if s.connect_ex((\"localhost\", port)) != 0:\n break\n port += 1\n\n app.run_server(debug=debug, port=port)\n\n\n@click.command(context_settings=CONTEXT_SETTINGS)\n@click.argument(\"output_folder\", metavar=\"FOLDER\", type=click.Path(exists=True))\ndef upload(output_folder):\n \"\"\"Uploads output folder to server\"\"\"\n\n output_path = pathlib.Path(output_folder)\n\n # check the rsync command exists.\n if not shutil.which(\"rsync\"):\n raise_error(\n RuntimeError,\n \"Could not find the rsync command. Please make sure it is installed.\",\n )\n\n # check that we can authentica with a certificate\n ssh_command_line = (\n \"ssh\",\n \"-o\",\n \"PreferredAuthentications=publickey\",\n \"-q\",\n UPLOAD_HOST,\n \"exit\",\n )\n\n str_line = \" \".join(repr(ele) for ele in ssh_command_line)\n\n log.info(f\"Checking SSH connection to {UPLOAD_HOST}.\")\n\n try:\n subprocess.run(ssh_command_line, check=True)\n except subprocess.CalledProcessError as e:\n raise RuntimeError(\n (\n \"Could not validate the SSH key. \"\n \"The command\\n%s\\nreturned a non zero exit status. 
\"\n \"Please make sure that your public SSH key is on the server.\"\n )\n % str_line\n ) from e\n except OSError as e:\n raise RuntimeError(\n \"Could not run the command\\n{}\\n: {}\".format(str_line, e)\n ) from e\n\n log.info(\"Connection seems OK.\")\n\n # upload output\n randname = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode()\n newdir = TARGET_DIR + randname\n\n rsync_command = (\n \"rsync\",\n \"-aLz\",\n \"--chmod=ug=rwx,o=rx\",\n f\"{output_path}/\",\n f\"{UPLOAD_HOST}:{newdir}\",\n )\n\n log.info(f\"Uploading output ({output_path}) to {UPLOAD_HOST}\")\n try:\n subprocess.run(rsync_command, check=True)\n except subprocess.CalledProcessError as e:\n msg = f\"Failed to upload output: {e}\"\n raise RuntimeError(msg) from e\n\n url = urljoin(ROOT_URL, randname)\n log.info(f\"Upload completed. The result is available at:\\n{url}\")\n\nsrc/qibocal/cli/__init__.py METASEP\n\"\"\"CLI entry point.\"\"\"\nfrom ._base import command, live_plot, upload\n\nsrc/qibocal/calibrations/characterization/utils.py METASEP\nimport numpy as np\n\n\ndef variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n):\n \"\"\"Helper function for sweeps.\"\"\"\n return np.concatenate(\n (\n np.arange(-lowres_width, -highres_width, lowres_step),\n np.arange(-highres_width, highres_width, highres_step),\n np.arange(highres_width, lowres_width, lowres_step),\n )\n )\n\nsrc/qibocal/calibrations/characterization/t1.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import t1_fit\n\n\n@plot(\"MSR vs Time\", plots.t1_time_msr_phase)\ndef t1(\n platform: AbstractPlatform,\n qubit: int,\n delay_before_readout_start,\n delay_before_readout_end,\n delay_before_readout_step,\n software_averages,\n points=10,\n):\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n ro_wait_range = np.arange(\n delay_before_readout_start, delay_before_readout_end, delay_before_readout_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n count = 0\n for _ in range(software_averages):\n for wait in ro_wait_range:\n if count % points == 0 and count > 0:\n yield data\n yield t1_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"t1\"],\n )\n ro_pulse.start = qd_pulse.duration + wait\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": wait,\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qibocal/calibrations/characterization/resonator_spectroscopy.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.calibrations.characterization.utils import 
variable_resolution_scanrange\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef resonator_spectroscopy(\n platform: AbstractPlatform,\n qubit: int,\n lowres_width,\n lowres_step,\n highres_width,\n highres_step,\n precision_width,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n variable_resolution_scanrange(\n lowres_width, lowres_step, highres_width, highres_step\n )\n + resonator_frequency\n )\n fast_sweep_data = DataUnits(\n name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield fast_sweep_data\n yield lorentzian_fit(\n fast_sweep_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n fast_sweep_data.add(results)\n count += 1\n yield fast_sweep_data\n\n if platform.resonator_type == \"3D\":\n resonator_frequency = fast_sweep_data.get_values(\"frequency\", \"Hz\")[\n np.argmax(fast_sweep_data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n fast_sweep_data.get_values(\"MSR\", \"V\")[: (lowres_width // lowres_step)]\n )\n * 1e6\n )\n else:\n resonator_frequency = fast_sweep_data.get_values(\"frequency\", \"Hz\")[\n np.argmin(fast_sweep_data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n fast_sweep_data.get_values(\"MSR\", \"V\")[: (lowres_width // lowres_step)]\n )\n * 1e6\n )\n\n precision_sweep__data = DataUnits(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(-precision_width, precision_width, precision_step)\n + resonator_frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield precision_sweep__data\n yield lorentzian_fit(\n fast_sweep_data + precision_sweep__data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n precision_sweep__data.add(results)\n count += 1\n yield precision_sweep__data\n\n\n@plot(\"Frequency vs Attenuation\", plots.frequency_attenuation_msr_phase)\n@plot(\"MSR vs Frequency\", plots.frequency_attenuation_msr_phase__cut)\ndef resonator_punchout(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n min_att,\n max_att,\n step_att,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(\n name=f\"data_q{qubit}\", 
quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"}\n )\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence = PulseSequence()\n sequence.add(ro_pulse)\n\n # TODO: move this explicit instruction to the platform\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step)\n + resonator_frequency\n - (freq_width / 4)\n )\n attenuation_range = np.flip(np.arange(min_att, max_att, step_att))\n count = 0\n for _ in range(software_averages):\n for att in attenuation_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n # TODO: move these explicit instructions to the platform\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.ro_port[qubit].attenuation = att\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr * (np.exp(att / 10)),\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"attenuation[dB]\": att,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Flux Current\", plots.frequency_flux_msr_phase)\ndef resonator_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline=0,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n fluxline = qubit\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n # TODO: automatically extract the sweet spot current\n # TODO: add a method to generate the matrix\n\n\n@plot(\"MSR row 1 and Phase row 2\", plots.frequency_flux_msr_phase__matrix)\ndef resonator_spectroscopy_flux_matrix(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_min,\n current_max,\n current_step,\n fluxlines,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n current_range = np.arange(current_min, current_max, 
current_step)\n\n count = 0\n for fluxline in fluxlines:\n fluxline = int(fluxline)\n print(fluxline)\n data = DataUnits(\n name=f\"data_q{qubit}_f{fluxline}\",\n quantities={\"frequency\": \"Hz\", \"current\": \"A\"},\n )\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.dispersive_frequency_msr_phase)\ndef dispersive_shift(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n sequence.add(ro_pulse)\n\n resonator_frequency = platform.characterization[\"single_qubit\"][qubit][\n \"resonator_freq\"\n ]\n\n frequency_range = (\n np.arange(-freq_width, freq_width, freq_step) + resonator_frequency\n )\n\n data_spec = DataUnits(name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield data_spec\n yield lorentzian_fit(\n data_spec,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n )\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data_spec.add(results)\n count += 1\n yield data_spec\n\n # Shifted Spectroscopy\n sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.finish)\n sequence.add(RX_pulse)\n sequence.add(ro_pulse)\n\n data_shifted = DataUnits(\n name=f\"data_shifted_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n count = 0\n for _ in range(software_averages):\n for freq in frequency_range:\n if count % points == 0 and count > 0:\n yield data_shifted\n yield lorentzian_fit(\n data_shifted,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"resonator_freq\", \"peak_voltage\"],\n fit_file_name=\"fit_shifted\",\n )\n platform.ro_port[qubit].lo_frequency = freq - ro_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data_shifted.add(results)\n count += 1\n yield data_shifted\n\nsrc/qibocal/calibrations/characterization/ramsey.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import ramsey_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey_frequency_detuned(\n platform: 
AbstractPlatform,\n qubit: int,\n t_start,\n t_end,\n t_step,\n n_osc,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n runcard_qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n runcard_T2 = platform.characterization[\"single_qubit\"][qubit][\"T2\"]\n intermediate_freq = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"frequency\"\n ]\n\n current_qubit_freq = runcard_qubit_freq\n current_T2 = runcard_T2\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n t_end = np.array(t_end)\n for t_max in t_end:\n count = 0\n platform.qd_port[qubit].lo_frequency = current_qubit_freq - intermediate_freq\n offset_freq = n_osc / t_max * sampling_rate # Hz\n t_range = np.arange(t_start, t_max, t_step)\n for wait in t_range:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n RX90_pulse2.relative_phase = (\n (RX90_pulse2.start / sampling_rate) * (2 * np.pi) * (-offset_freq)\n )\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": t_max,\n }\n data.add(results)\n count += 1\n\n # # Fitting\n data_fit = ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=current_qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=offset_freq,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n\n new_t2 = data_fit.get_values(\"t2\")\n corrected_qubit_freq = data_fit.get_values(\"corrected_qubit_frequency\")\n\n # if ((new_t2 * 3.5) > t_max):\n if (new_t2 > current_T2).bool() and len(t_end) > 1:\n current_qubit_freq = int(corrected_qubit_freq)\n current_T2 = new_t2\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"}\n )\n else:\n corrected_qubit_freq = int(current_qubit_freq)\n new_t2 = current_T2\n break\n\n yield data\n\n\n@plot(\"MSR vs Time\", plots.time_msr)\ndef ramsey(\n platform: AbstractPlatform,\n qubit: int,\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n sampling_rate = platform.sampling_rate\n qubit_freq = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n RX90_pulse1 = platform.create_RX90_pulse(qubit, start=0)\n RX90_pulse2 = platform.create_RX90_pulse(qubit, 
start=RX90_pulse1.finish)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX90_pulse2.finish)\n\n sequence = PulseSequence()\n sequence.add(RX90_pulse1)\n sequence.add(RX90_pulse2)\n sequence.add(ro_pulse)\n\n waits = np.arange(\n delay_between_pulses_start,\n delay_between_pulses_end,\n delay_between_pulses_step,\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX90_pulse1.frequency\n )\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"wait\": \"ns\", \"t_max\": \"ns\"})\n count = 0\n for _ in range(software_averages):\n for wait in waits:\n if count % points == 0 and count > 0:\n yield data\n yield ramsey_fit(\n data,\n x=\"wait[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n qubit_freq=qubit_freq,\n sampling_rate=sampling_rate,\n offset_freq=0,\n labels=[\n \"delta_frequency\",\n \"corrected_qubit_frequency\",\n \"t2\",\n ],\n )\n RX90_pulse2.start = RX90_pulse1.finish + wait\n ro_pulse.start = RX90_pulse2.finish\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"wait[ns]\": wait,\n \"t_max[ns]\": delay_between_pulses_end,\n }\n data.add(results)\n count += 1\n yield data\n\nsrc/qibocal/calibrations/characterization/rabi_oscillations.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import rabi_fit\n\n\n@plot(\"MSR vs Time\", plots.time_msr_phase)\ndef rabi_pulse_length(\n platform: AbstractPlatform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"Time\": \"ns\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"Time[ns]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_duration\",\n \"pi_pulse_max_voltage\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"Time[ns]\": duration,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Gain\", 
plots.gain_msr_phase)\ndef rabi_pulse_gain(\n platform: AbstractPlatform,\n qubit: int,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"gain\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.finish)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"gain[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_gain\",\n \"pi_pulse_max_voltage\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs Amplitude\", plots.amplitude_msr_phase)\ndef rabi_pulse_amplitude(\n platform,\n qubit: int,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"amplitude\": \"dimensionless\"})\n\n sequence = PulseSequence()\n qd_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=qd_pulse.duration)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0 and count > 0:\n yield data\n yield rabi_fit(\n data,\n x=\"amplitude[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"pi_pulse_amplitude\",\n \"pi_pulse_max_voltage\",\n ],\n )\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n yield data\n\n\n@plot(\"MSR vs length and gain\", plots.duration_gain_msr_phase)\ndef rabi_pulse_length_and_gain(\n platform: AbstractPlatform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_gain_start,\n pulse_gain_end,\n pulse_gain_step,\n software_averages,\n 
points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"duration\": \"ns\", \"gain\": \"dimensionless\"}\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_gain_range = np.arange(pulse_gain_start, pulse_gain_end, pulse_gain_step)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for gain in qd_pulse_gain_range:\n platform.qd_port[qubit].gain = gain\n if count % points == 0 and count > 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"gain[dimensionless]\": gain,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\n@plot(\"MSR vs length and amplitude\", plots.duration_amplitude_msr_phase)\ndef rabi_pulse_length_and_amplitude(\n platform,\n qubit: int,\n pulse_duration_start,\n pulse_duration_end,\n pulse_duration_step,\n pulse_amplitude_start,\n pulse_amplitude_end,\n pulse_amplitude_step,\n software_averages,\n points=10,\n):\n platform.reload_settings()\n\n data = DataUnits(\n name=f\"data_q{qubit}\",\n quantities={\"duration\": \"ns\", \"amplitude\": \"dimensionless\"},\n )\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=4)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qd_pulse_duration_range = np.arange(\n pulse_duration_start, pulse_duration_end, pulse_duration_step\n )\n qd_pulse_amplitude_range = np.arange(\n pulse_amplitude_start, pulse_amplitude_end, pulse_amplitude_step\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n for duration in qd_pulse_duration_range:\n qd_pulse.duration = duration\n ro_pulse.start = duration\n for amplitude in qd_pulse_amplitude_range:\n qd_pulse.amplitude = amplitude\n if count % points == 0:\n yield data\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"duration[ns]\": duration,\n \"amplitude[dimensionless]\": amplitude,\n }\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qibocal/calibrations/characterization/qubit_spectroscopy.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data 
import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import lorentzian_fit\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_msr_phase__fast_precision)\ndef qubit_spectroscopy(\n platform: AbstractPlatform,\n qubit: int,\n fast_start,\n fast_end,\n fast_step,\n precision_start,\n precision_end,\n precision_step,\n software_averages,\n points=10,\n):\n\n platform.reload_settings()\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n\n freqrange = np.arange(fast_start, fast_end, fast_step) + qubit_frequency\n\n data = DataUnits(quantities={\"frequency\": \"Hz\", \"attenuation\": \"dB\"})\n\n # FIXME: Waiting for Qblox platform to take care of that\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n\n data = DataUnits(name=f\"fast_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"})\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield data\n yield lorentzian_fit(\n data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n data.add(results)\n count += 1\n yield data\n\n if platform.resonator_type == \"3D\":\n qubit_frequency = data.get_values(\"frequency\", \"Hz\")[\n np.argmin(data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n data.get_values(\"MSR\", \"V\")[: ((fast_end - fast_start) // fast_step)]\n )\n * 1e6\n )\n else:\n qubit_frequency = data.get_values(\"frequency\", \"Hz\")[\n np.argmax(data.get_values(\"MSR\", \"V\"))\n ]\n avg_voltage = (\n np.mean(\n data.get_values(\"MSR\", \"V\")[: ((fast_end - fast_start) // fast_step)]\n )\n * 1e6\n )\n\n prec_data = DataUnits(\n name=f\"precision_sweep_q{qubit}\", quantities={\"frequency\": \"Hz\"}\n )\n freqrange = (\n np.arange(precision_start, precision_end, precision_step) + qubit_frequency\n )\n count = 0\n for _ in range(software_averages):\n for freq in freqrange:\n if count % points == 0 and count > 0:\n yield prec_data\n yield lorentzian_fit(\n data + prec_data,\n x=\"frequency[GHz]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\"qubit_freq\", \"peak_voltage\"],\n )\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n }\n prec_data.add(results)\n count += 1\n yield prec_data\n # TODO: Estimate avg_voltage correctly\n\n\n@plot(\"MSR and Phase vs Frequency\", plots.frequency_flux_msr_phase)\ndef qubit_spectroscopy_flux(\n platform: AbstractPlatform,\n qubit: int,\n freq_width,\n freq_step,\n current_max,\n current_min,\n current_step,\n software_averages,\n fluxline,\n points=10,\n):\n platform.reload_settings()\n\n if fluxline == \"qubit\":\n 
fluxline = qubit\n\n sequence = PulseSequence()\n qd_pulse = platform.create_qubit_drive_pulse(qubit, start=0, duration=5000)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=5000)\n sequence.add(qd_pulse)\n sequence.add(ro_pulse)\n\n data = DataUnits(\n name=f\"data_q{qubit}\", quantities={\"frequency\": \"Hz\", \"current\": \"A\"}\n )\n\n qubit_frequency = platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n qubit_biasing_current = platform.characterization[\"single_qubit\"][qubit][\n \"sweetspot\"\n ]\n frequency_range = np.arange(-freq_width, freq_width, freq_step) + qubit_frequency\n current_range = (\n np.arange(current_min, current_max, current_step) + qubit_biasing_current\n )\n\n count = 0\n for _ in range(software_averages):\n for curr in current_range:\n for freq in frequency_range:\n if count % points == 0:\n yield data\n platform.qd_port[qubit].lo_frequency = freq - qd_pulse.frequency\n platform.qf_port[fluxline].current = curr\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"frequency[Hz]\": freq,\n \"current[A]\": curr,\n }\n # TODO: implement normalization\n data.add(results)\n count += 1\n\n yield data\n\nsrc/qibocal/calibrations/characterization/flipping.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import flipping_fit\n\n\n@plot(\"MSR vs Flips\", plots.flips_msr_phase)\ndef flipping(\n platform: AbstractPlatform,\n qubit: int,\n niter,\n step,\n points=10,\n):\n platform.reload_settings()\n pi_pulse_amplitude = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"amplitude\"\n ]\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"flips\": \"dimensionless\"})\n\n sequence = PulseSequence()\n RX90_pulse = platform.create_RX90_pulse(qubit, start=0)\n\n count = 0\n # repeat N iter times\n for n in range(0, niter, step):\n if count % points == 0 and count > 0:\n yield data\n yield flipping_fit(\n data,\n x=\"flips[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n niter=niter,\n pi_pulse_amplitude=pi_pulse_amplitude,\n labels=[\"amplitude_delta\", \"corrected_amplitude\"],\n )\n sequence.add(RX90_pulse)\n # execute sequence RX(pi/2) - [RX(pi) - RX(pi)] from 0...n times - RO\n start1 = RX90_pulse.duration\n for j in range(n):\n RX_pulse1 = platform.create_RX_pulse(qubit, start=start1)\n start2 = start1 + RX_pulse1.duration\n RX_pulse2 = platform.create_RX_pulse(qubit, start=start2)\n sequence.add(RX_pulse1)\n sequence.add(RX_pulse2)\n start1 = start2 + RX_pulse2.duration\n\n # add ro pulse at the end of the sequence\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=start1)\n sequence.add(ro_pulse)\n\n msr, phase, i, q = platform.execute_pulse_sequence(sequence)[ro_pulse.serial]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"flips[dimensionless]\": n,\n }\n data.add(results)\n count += 1\n sequence = PulseSequence()\n\n yield data\n\nsrc/qibocal/calibrations/characterization/calibrate_qubit_states.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import 
DataUnits\nfrom qibocal.decorators import plot\n\n\n@plot(\"exc vs gnd\", plots.exc_gnd)\ndef calibrate_qubit_states(\n platform: AbstractPlatform,\n qubit: int,\n niter,\n points=10,\n):\n\n # create exc sequence\n exc_sequence = PulseSequence()\n RX_pulse = platform.create_RX_pulse(qubit, start=0)\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=RX_pulse.duration)\n exc_sequence.add(RX_pulse)\n exc_sequence.add(ro_pulse)\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse.frequency\n )\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - RX_pulse.frequency\n )\n\n data_exc = DataUnits(name=f\"data_exc_q{qubit}\", quantities={\"iteration\": \"s\"})\n\n count = 0\n for n in np.arange(niter):\n if count % points == 0:\n yield data_exc\n msr, phase, i, q = platform.execute_pulse_sequence(exc_sequence, nshots=1)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"iteration[s]\": n,\n }\n data_exc.add(results)\n count += 1\n yield data_exc\n\n gnd_sequence = PulseSequence()\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=0)\n gnd_sequence.add(ro_pulse)\n\n data_gnd = DataUnits(name=f\"data_gnd_q{qubit}\", quantities={\"iteration\": \"s\"})\n count = 0\n for n in np.arange(niter):\n if count % points == 0:\n yield data_gnd\n\n msr, phase, i, q = platform.execute_pulse_sequence(gnd_sequence, nshots=1)[\n ro_pulse.serial\n ]\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[deg]\": phase,\n \"iteration[s]\": n,\n }\n data_gnd.add(results)\n count += 1\n yield data_gnd\n\nsrc/qibocal/calibrations/characterization/allXY.py METASEP\nimport numpy as np\nfrom qibolab.platforms.abstract import AbstractPlatform\nfrom qibolab.pulses import PulseSequence\n\nfrom qibocal import plots\nfrom qibocal.data import DataUnits\nfrom qibocal.decorators import plot\nfrom qibocal.fitting.methods import drag_tunning_fit\n\n# allXY rotations\ngatelist = [\n [\"I\", \"I\"],\n [\"RX(pi)\", \"RX(pi)\"],\n [\"RY(pi)\", \"RY(pi)\"],\n [\"RX(pi)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RX(pi)\"],\n [\"RX(pi/2)\", \"I\"],\n [\"RY(pi/2)\", \"I\"],\n [\"RX(pi/2)\", \"RY(pi/2)\"],\n [\"RY(pi/2)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RY(pi)\"],\n [\"RY(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RY(pi/2)\"],\n [\"RY(pi)\", \"RX(pi/2)\"],\n [\"RX(pi/2)\", \"RX(pi)\"],\n [\"RX(pi)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi)\"],\n [\"RY(pi)\", \"RY(pi/2)\"],\n [\"RX(pi)\", \"I\"],\n [\"RY(pi)\", \"I\"],\n [\"RX(pi/2)\", \"RX(pi/2)\"],\n [\"RY(pi/2)\", \"RY(pi/2)\"],\n]\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate)\ndef allXY(\n platform: AbstractPlatform,\n qubit: int,\n beta_param=None,\n software_averages=1,\n points=10,\n):\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = DataUnits(\n name=f\"data_q{qubit}\",\n quantities={\"probability\": \"dimensionless\", \"gateNumber\": \"dimensionless\"},\n )\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n ro_pulse_test = platform.create_qubit_readout_pulse(qubit, start=4)\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - 
ro_pulse_test.frequency\n )\n\n qd_pulse_test = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse_test.frequency\n )\n\n count = 0\n for _ in range(software_averages):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=2048)[\n ro_pulse.serial\n ]\n\n prob = np.abs(msr * 1e6 - state1_voltage) / np.abs(\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": gateNumber,\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"Prob vs gate sequence\", plots.prob_gate_iteration)\ndef allXY_iteration(\n platform: AbstractPlatform,\n qubit: int,\n beta_start,\n beta_end,\n beta_step,\n software_averages=1,\n points=10,\n):\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n ro_pulse_test = platform.create_qubit_readout_pulse(qubit, start=4)\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse_test.frequency\n )\n\n qd_pulse_test = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n platform.qd_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse_test.frequency\n )\n\n state0_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state0_voltage\"]\n )\n state1_voltage = complex(\n platform.characterization[\"single_qubit\"][qubit][\"state1_voltage\"]\n )\n\n data = DataUnits(\n name=f\"data_q{qubit}\",\n quantities={\n \"probability\": \"dimensionless\",\n \"gateNumber\": \"dimensionless\",\n \"beta_param\": \"dimensionless\",\n },\n )\n\n count = 0\n for _ in range(software_averages):\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n gateNumber = 1\n for gates in gatelist:\n if count % points == 0 and count > 0:\n yield data\n seq, ro_pulse = _get_sequence_from_gate_pair(\n platform, gates, qubit, beta_param\n )\n seq.add(ro_pulse)\n msr, phase, i, q = platform.execute_pulse_sequence(seq, nshots=1024)[\n ro_pulse.serial\n ]\n\n prob = np.abs(msr * 1e6 - state1_voltage) / np.abs(\n state1_voltage - state0_voltage\n )\n prob = (2 * prob) - 1\n\n results = {\n \"MSR[V]\": msr,\n \"i[V]\": i,\n \"q[V]\": q,\n \"phase[rad]\": phase,\n \"probability[dimensionless]\": prob,\n \"gateNumber[dimensionless]\": gateNumber,\n \"beta_param[dimensionless]\": beta_param,\n }\n data.add(results)\n count += 1\n gateNumber += 1\n yield data\n\n\n@plot(\"MSR vs beta parameter\", plots.msr_beta)\ndef drag_pulse_tunning(\n platform: AbstractPlatform,\n qubit: int,\n beta_start,\n beta_end,\n beta_step,\n points=10,\n):\n\n # platform.reload_settings()\n\n # FIXME: Waiting to be able to pass qpucard to qibolab\n ro_pulse_test = platform.create_qubit_readout_pulse(qubit, start=4)\n platform.ro_port[qubit].lo_frequency = (\n platform.characterization[\"single_qubit\"][qubit][\"resonator_freq\"]\n - ro_pulse_test.frequency\n )\n\n qd_pulse_test = platform.create_qubit_drive_pulse(qubit, start=0, duration=4)\n platform.qd_port[qubit].lo_frequency = (\n 
platform.characterization[\"single_qubit\"][qubit][\"qubit_freq\"]\n - qd_pulse_test.frequency\n )\n\n data = DataUnits(name=f\"data_q{qubit}\", quantities={\"beta_param\": \"dimensionless\"})\n\n count = 0\n for beta_param in np.arange(beta_start, beta_end, beta_step).round(4):\n if count % points == 0 and count > 0:\n yield data\n yield drag_tunning_fit(\n data,\n x=\"beta_param[dimensionless]\",\n y=\"MSR[uV]\",\n qubit=qubit,\n nqubits=platform.settings[\"nqubits\"],\n labels=[\n \"optimal_beta_param\",\n ],\n )\n # drag pulse RX(pi/2)\n RX90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, beta=beta_param\n )\n # drag pulse RY(pi)\n RY_drag_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=RX90_drag_pulse.finish,\n relative_phase=+np.pi / 2,\n beta=beta_param,\n )\n # RO pulse\n ro_pulse = platform.create_qubit_readout_pulse(\n qubit, start=RY_drag_pulse.finish\n )\n\n # Rx(pi/2) - Ry(pi) - Ro\n seq1 = PulseSequence()\n seq1.add(RX90_drag_pulse)\n seq1.add(RY_drag_pulse)\n seq1.add(ro_pulse)\n msr1, i1, q1, phase1 = platform.execute_pulse_sequence(seq1)[ro_pulse.serial]\n\n # drag pulse RY(pi/2)\n RY90_drag_pulse = platform.create_RX90_drag_pulse(\n qubit, start=0, relative_phase=np.pi / 2, beta=beta_param\n )\n # drag pulse RX(pi)\n RX_drag_pulse = platform.create_RX_drag_pulse(\n qubit, start=RY90_drag_pulse.finish, beta=beta_param\n )\n\n # Ry(pi/2) - Rx(pi) - Ro\n seq2 = PulseSequence()\n seq2.add(RY90_drag_pulse)\n seq2.add(RX_drag_pulse)\n seq2.add(ro_pulse)\n msr2, phase2, i2, q2 = platform.execute_pulse_sequence(seq2)[ro_pulse.serial]\n results = {\n \"MSR[V]\": msr1 - msr2,\n \"i[V]\": i1 - i2,\n \"q[V]\": q1 - q2,\n \"phase[deg]\": phase1 - phase2,\n \"beta_param[dimensionless]\": beta_param,\n }\n data.add(results)\n count += 1\n\n yield data\n\n\ndef _get_sequence_from_gate_pair(platform: AbstractPlatform, gates, qubit, beta_param):\n\n pulse_duration = platform.settings[\"native_gates\"][\"single_qubit\"][qubit][\"RX\"][\n \"duration\"\n ]\n # All gates have equal pulse duration\n\n sequence = PulseSequence()\n\n sequenceDuration = 0\n pulse_start = 0\n\n for gate in gates:\n if gate == \"I\":\n # print(\"Transforming to sequence I gate\")\n pass\n\n if gate == \"RX(pi)\":\n # print(\"Transforming to sequence RX(pi) gate\")\n if beta_param == None:\n RX_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX_pulse)\n\n if gate == \"RX(pi/2)\":\n # print(\"Transforming to sequence RX(pi/2) gate\")\n if beta_param == None:\n RX90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n )\n else:\n RX90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n beta=beta_param,\n )\n sequence.add(RX90_pulse)\n\n if gate == \"RY(pi)\":\n # print(\"Transforming to sequence RY(pi) gate\")\n if beta_param == None:\n RY_pulse = platform.create_RX_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY_pulse = platform.create_RX_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n beta=beta_param,\n )\n sequence.add(RY_pulse)\n\n if gate == \"RY(pi/2)\":\n # print(\"Transforming to sequence RY(pi/2) gate\")\n if beta_param == None:\n RY90_pulse = platform.create_RX90_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n )\n else:\n RY90_pulse = platform.create_RX90_drag_pulse(\n qubit,\n start=pulse_start,\n relative_phase=np.pi / 2,\n 
beta=beta_param,\n )\n sequence.add(RY90_pulse)\n\n sequenceDuration = sequenceDuration + pulse_duration\n pulse_start = pulse_duration\n\n # RO pulse starting just after pair of gates\n ro_pulse = platform.create_qubit_readout_pulse(qubit, start=sequenceDuration + 4)\n\n return sequence, ro_pulse\n\nsrc/qibocal/calibrations/characterization/__init__.py METASEP\n\nsrc/qibocal/decorators.py METASEP\n\"\"\"Decorators implementation.\"\"\"\nimport os\n\nfrom qibocal.config import raise_error\n\n\ndef plot(header, method):\n \"\"\"Decorator for adding plots in the report and live plotting page.\n\n Args:\n header (str): Header of the plot to use in the report.\n method (Callable): Plotting method defined under ``qibocal.plots``.\n \"\"\"\n\n def wrapped(f):\n if hasattr(f, \"plots\"):\n # insert in the beginning of the list to have\n # proper plot ordering in the report\n f.plots.insert(0, (header, method))\n else:\n f.plots = [(header, method)]\n return f\n\n return wrapped\n\nsrc/qibocal/data.py METASEP\n\"\"\"Implementation of DataUnits and Data class to store calibration routines outputs.\"\"\"\n\nimport re\nfrom abc import abstractmethod\n\nimport numpy as np\nimport pandas as pd\nimport pint_pandas\n\nfrom qibocal.config import raise_error\n\n\nclass AbstractData:\n def __init__(self, name=None):\n\n if name is None:\n self.name = \"data\"\n else:\n self.name = name\n\n self.df = pd.DataFrame()\n self.quantities = None\n\n def __add__(self, data):\n self.df = pd.concat([self.df, data.df], ignore_index=True)\n return self\n\n @abstractmethod\n def add(self, data):\n raise_error(NotImplementedError)\n\n def __len__(self):\n \"\"\"Computes the length of the data.\"\"\"\n return len(self.df)\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n raise_error(NotImplementedError)\n\n @abstractmethod\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n if self.quantities == None:\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n else:\n self.df.pint.dequantify().to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\n\nclass DataUnits(AbstractData):\n \"\"\"Class to store the data measured during the calibration routines.\n It is a wrapper to a pandas DataFrame with units of measure from the Pint\n library.\n\n Args:\n quantities (dict): dictionary containing additional quantities that the user\n may save other than the pulse sequence output. 
The keys are the name of the\n quantities and the corresponding values are the units of measure.\n options (list): list containing additional values to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None, options=None):\n\n super().__init__(name=name)\n\n self._df = pd.DataFrame(\n {\n \"MSR\": pd.Series(dtype=\"pint[V]\"),\n \"i\": pd.Series(dtype=\"pint[V]\"),\n \"q\": pd.Series(dtype=\"pint[V]\"),\n \"phase\": pd.Series(dtype=\"pint[deg]\"),\n }\n )\n self.quantities = {\"MSR\": \"V\", \"i\": \"V\", \"q\": \"V\", \"phase\": \"rad\"}\n self.options = []\n\n if quantities is not None:\n self.quantities.update(quantities)\n for name, unit in quantities.items():\n self.df.insert(0, name, pd.Series(dtype=f\"pint[{unit}]\"))\n\n if options is not None:\n self.options = options\n for option in options:\n self.df.insert( # pylint: disable=E1101\n 0, option, pd.Series(dtype=object)\n )\n\n from pint import UnitRegistry\n\n self.ureg = UnitRegistry()\n\n @property\n def df(self):\n return self._df\n\n @df.setter\n def df(self, df):\n \"\"\"Set df attribute.\n\n Args:\n df (pd.DataFrame): pandas DataFrame. Every key should have the following form:\n ``[]``.\n \"\"\"\n if isinstance(df, pd.DataFrame):\n self._df = df\n else:\n raise_error(TypeError, f\"{df.type} is not a pd.DataFrame.\")\n\n def load_data_from_dict(self, data: dict):\n \"\"\"Set df attribute.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n processed_data = {}\n for key, values in data.items():\n if \"[\" in key:\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n processed_data[name] = pd.Series(\n data=(np.array(values) * self.ureg(unit)), dtype=f\"pint[{unit}]\"\n )\n else:\n processed_data[key] = pd.Series(data=(values), dtype=object)\n self._df = pd.DataFrame(processed_data)\n\n def add(self, data):\n \"\"\"Add a row to `DataUnits`.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n\n for key, value in data.items():\n if \"[\" in key:\n name = key.split(\"[\")[0]\n unit = re.search(r\"\\[([A-Za-z0-9_]+)\\]\", key).group(1)\n # TODO: find a better way to do this\n self.df.loc[l, name] = np.array(value) * self.ureg(unit)\n else:\n self.df.loc[l, key] = value\n\n def get_values(self, key, unit=None):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n unit (str): Unit of the returned values.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n if unit is None:\n return self.df[key]\n else:\n return self.df[key].pint.to(unit).pint.magnitude\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. 
Possible choices are 'csv' and 'pickle'.\n\n Returns:\n data (``DataUnits``): dataset object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file, header=[0, 1])\n obj.df.pop(\"Unnamed: 0_level_0\")\n quantities_label = []\n obj.options = []\n for column in obj.df.columns: # pylint: disable=E1101\n if \"Unnamed\" not in column[1]:\n quantities_label.append(column[0])\n else:\n obj.options.append(column[0])\n quantities_df = obj.df[quantities_label].pint.quantify()\n options_df = obj.df[obj.options]\n options_df.columns = options_df.columns.droplevel(1)\n obj.df = pd.concat([quantities_df, options_df], axis=1)\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n data = self.df[list(self.quantities)].pint.dequantify()\n firsts = data.index.get_level_values(None)\n data[self.options] = self.df[self.options].loc[firsts].values\n data.to_csv(f\"{path}/{self.name}.csv\")\n\n\nclass Data(AbstractData):\n \"\"\"Class to store the data obtained from calibration routines.\n It is a wrapper to a pandas DataFrame.\n\n Args:\n quantities (dict): dictionary quantities to be saved.\n \"\"\"\n\n def __init__(self, name=None, quantities=None):\n\n super().__init__(name=name)\n\n if quantities is not None:\n self.quantities = quantities\n for name in quantities:\n self.df.insert(0, name, pd.Series(dtype=object))\n\n @property\n def df(self):\n return self._df\n\n @df.setter\n def df(self, data):\n \"\"\"Set df attribute.\n\n Args:\n df (pd.DataFrame):\n \"\"\"\n if isinstance(data, pd.DataFrame):\n self._df = data\n\n def load_data_from_dict(self, data: dict):\n \"\"\"Set df attribute.\n\n Args:\n df (dict): dictionary containing the data to be added.\n \"\"\"\n processed_data = {}\n for key, values in data.items():\n processed_data[key] = pd.Series(data=(values), dtype=object)\n self._df = pd.DataFrame(processed_data)\n\n def add(self, data):\n \"\"\"Add a row to data.\n\n Args:\n data (dict): dictionary containing the data to be added.\n Every key should have the following form:\n ``[]``.\n \"\"\"\n l = len(self)\n for key, value in data.items():\n self.df.loc[l, key] = value\n\n def get_values(self, quantity):\n \"\"\"Get values of a quantity in specified units.\n\n Args:\n quantity (str): Quantity to get the values of.\n\n Returns:\n ``pd.Series`` with the quantity values in the given units.\n \"\"\"\n return self.df[quantity].values\n\n @classmethod\n def load_data(cls, folder, routine, format, name):\n \"\"\"Load data from specific format.\n\n Args:\n folder (path): path to the output folder from which the data will be loaded\n routine (str): calibration routine data to be loaded\n format (str): data format. 
Possible choices are 'csv' and 'pickle'.\n\n Returns:\n data (``Data``): data object with the loaded data.\n \"\"\"\n obj = cls()\n if format == \"csv\":\n file = f\"{folder}/data/{routine}/{name}.csv\"\n obj.df = pd.read_csv(file)\n obj.df.pop(\"Unnamed: 0\")\n elif format == \"pickle\":\n file = f\"{folder}/data/{routine}/{name}.pkl\"\n obj.df = pd.read_pickle(file)\n else:\n raise_error(ValueError, f\"Cannot load data using {format} format.\")\n\n return obj\n\n def to_csv(self, path):\n \"\"\"Save data in csv file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_csv(f\"{path}/{self.name}.csv\")\n\n def to_pickle(self, path):\n \"\"\"Save data in pickel file.\n\n Args:\n path (str): Path containing output folder.\"\"\"\n self.df.to_pickle(f\"{path}/{self.name}.pkl\")\n\nsrc/qibocal/config.py METASEP\n\"\"\"Custom logger implemenation.\"\"\"\nimport logging\nimport os\n\n# Logging levels available here https://docs.python.org/3/library/logging.html#logging-levels\nQIBOCAL_LOG_LEVEL = 30\nif \"QIBOCAL_LOG_LEVEL\" in os.environ: # pragma: no cover\n QIBOCAL_LOG_LEVEL = 10 * int(os.environ.get(\"QIBOCAL_LOG_LEVEL\"))\n\n\ndef raise_error(exception, message=None, args=None):\n \"\"\"Raise exception with logging error.\n\n Args:\n exception (Exception): python exception.\n message (str): the error message.\n \"\"\"\n log.error(message)\n if args:\n raise exception(message, args)\n else:\n raise exception(message)\n\n\n# Configuration for logging mechanism\nclass CustomHandler(logging.StreamHandler):\n \"\"\"Custom handler for logging algorithm.\"\"\"\n\n def format(self, record):\n \"\"\"Format the record with specific format.\"\"\"\n from qibocal import __version__\n\n fmt = f\"[Qibocal {__version__}|%(levelname)s|%(asctime)s]: %(message)s\"\n\n grey = \"\\x1b[38;20m\"\n green = \"\\x1b[92m\"\n yellow = \"\\x1b[33;20m\"\n red = \"\\x1b[31;20m\"\n bold_red = \"\\x1b[31;1m\"\n reset = \"\\x1b[0m\"\n\n self.FORMATS = {\n logging.DEBUG: green + fmt + reset,\n logging.INFO: grey + fmt + reset,\n logging.WARNING: yellow + fmt + reset,\n logging.ERROR: red + fmt + reset,\n logging.CRITICAL: bold_red + fmt + reset,\n }\n log_fmt = self.FORMATS.get(record.levelno)\n return logging.Formatter(log_fmt, datefmt=\"%Y-%m-%d %H:%M:%S\").format(record)\n\n\n# allocate logger object\nlog = logging.getLogger(__name__)\nlog.setLevel(QIBOCAL_LOG_LEVEL)\nlog.addHandler(CustomHandler())\n\nsrc/qibocal/__init__.py METASEP\nfrom .cli import command, live_plot, upload\n\n\"\"\"qibocal: Quantum Calibration Verification and Validation using Qibo.\"\"\"\nimport importlib.metadata as im\n\n__version__ = im.version(__package__)\n\nsrc/qibocal/calibrations/__init__.py METASEP\nfrom qibocal.calibrations.characterization.allXY import *\nfrom qibocal.calibrations.characterization.calibrate_qubit_states import *\nfrom qibocal.calibrations.characterization.flipping import *\nfrom qibocal.calibrations.characterization.qubit_spectroscopy import *\nfrom qibocal.calibrations.characterization.rabi_oscillations import *\nfrom qibocal.calibrations.characterization.ramsey import *\nfrom qibocal.calibrations.characterization.resonator_spectroscopy import *\nfrom qibocal.calibrations.characterization.t1 import *\nfrom qibocal.calibrations.protocols.test import *\n\nsrc/qibocal/calibrations/protocols/test.py METASEP\nfrom qibo import gates, models\n\nfrom qibocal.data import Data\n\n\ndef test(\n platform,\n qubit: list,\n nshots,\n points=1,\n):\n data = Data(\"test\", quantities=[\"nshots\", 
\"probabilities\"])\n nqubits = len(qubit)\n circuit = models.Circuit(nqubits)\n circuit.add(gates.H(qubit[0]))\n circuit.add(gates.H(qubit[1]))\n # circuit.add(gates.H(1))\n circuit.add(gates.M(*qubit))\n execution = circuit(nshots=nshots)\n\n data.add({\"nshots\": nshots, \"probabilities\": execution.probabilities()})\n yield data\n\nsrc/qibocal/calibrations/protocols/__init__.py METASEP\n\nsrc/qibocal/calibrations/protocols/abstract.py METASEP\n"},"file_context":{"kind":"list like","value":[{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. 
class Experiment:
    """Experiment object which holds an iterable circuit factory along with
    a simple data structure associated to each circuit.

    Args:
        circuitfactory (Iterable): Gives a certain amount of circuits when
            iterated over.
        nshots (int): For execution of the circuits, indicates how many shots.
        data (list): If filled, ``data`` can be used to specify parameters
            while executing a circuit or to decide how to process results.
        noisemodel (NoiseModel): Optional noise model applied to each circuit
            before execution.
    """

    def __init__(
        self,
        circuitfactory: Iterable,
        nshots: int = None,
        data: list = None,
        noisemodel: NoiseModel = None,
    ) -> None:
        self.circuitfactory = circuitfactory
        self.nshots = nshots
        self.data = data
        self.__noise_model = noisemodel

    @classmethod
    def load(cls, path: str) -> Experiment:
        """Creates an object with data and, if possible, with circuits.

        Args:
            path (str): The directory from where the object should be restored.

        Returns:
            Experiment: The object with data (and circuitfactory).
        """
        datapath = f"{path}data.pkl"
        circuitspath = f"{path}circuits.pkl"
        if isfile(datapath):
            with open(datapath, "rb") as f:
                data = pickle.load(f)
                if isinstance(data, pd.DataFrame):
                    data = data.to_dict("records")
            nshots = len(data[0]["samples"])
        else:
            data, nshots = None, None
        if isfile(circuitspath):
            with open(circuitspath, "rb") as f:
                circuitfactory = pickle.load(f)
        else:
            circuitfactory = None
        # Initiate an instance of the experiment class.
        return cls(circuitfactory, data=data, nshots=nshots)

    def prebuild(self) -> None:
        """Converts the attribute ``circuitfactory``, which is in general
        an iterable, into a list.
        """
        self.circuitfactory = list(self.circuitfactory)

    def execute(self) -> None:
        """Calls method ``single_task`` while iterating over attribute
        ``circuitfactory``.

        Collects data given the already set data and overwrites
        attribute ``data``.
        """
        if self.circuitfactory is None:
            raise NotImplementedError("There are no circuits to execute.")
        newdata = []
        for circuit in self.circuitfactory:
            try:
                datarow = next(self.data)
            except TypeError:
                datarow = {}
            newdata.append(self.single_task(deepcopy(circuit), datarow))
        self.data = newdata

    def single_task(self, circuit: Circuit, datarow: dict) -> dict:
        """Executes a circuit and returns the single shot results.

        Args:
            circuit (Circuit): Will be executed, has to return samples.
            datarow (dict): Dictionary with parameters for execution and
                immediate postprocessing information.
        """
        if self.__noise_model is not None:
            circuit = self.__noise_model.apply(circuit)
        samples = circuit(nshots=self.nshots).samples()
        return {"samples": samples}

    def save(self) -> None:
        """Creates a path and pickles the relevant data from ``self.data`` and,
        if ``self.circuitfactory`` is a list, that one too.
        """
        self.path = experiment_directory("standardrb")
        if isinstance(self.circuitfactory, list):
            with open(f"{self.path}circuits.pkl", "wb") as f:
                pickle.dump(self.circuitfactory, f)
        with open(f"{self.path}data.pkl", "wb") as f:
            pickle.dump(self.data, f)

    @property
    def dataframe(self) -> pd.DataFrame:
        return pd.DataFrame(self.data)

    def _append_data(self, name: str, datacolumn: list) -> None:
        """Adds a data column to the ``data`` attribute.

        Args:
            name (str): Name of the data column.
            datacolumn (list): A list with one entry per data row.
        """
        if len(datacolumn) != len(self.data):
            raise ValueError("Given data column doesn't have the right length.")
        df = self.dataframe
        df[name] = datacolumn
        self.data = df.to_dict("records")

    @property
    def samples(self) -> np.ndarray:
        """Returns the samples from ``self.data`` in an array of shape
        (circuits, shots, qubits).

        Returns:
            np.ndarray: array of samples.
        """
        try:
            return np.array(self.dataframe["samples"].tolist())
        except KeyError:
            print("No samples here. Execute experiment first.")
            return None

    @property
    def probabilities(self) -> np.ndarray:
        """Takes the stored samples and returns the probability for each
        possible state to occur.

        Returns:
            np.ndarray: 2d probability array of shape (circuits, states).
        """
        allsamples = self.samples
        if allsamples is None:
            print("No probabilities either.")
            return None
        # Create all possible state vectors.
        allstates = list(product([0, 1], repeat=len(allsamples[0][0])))
        # Iterate over all the samples and count the different states.
        probs = [
            [np.sum(np.prod(samples == state, axis=1)) for state in allstates]
            for samples in allsamples
        ]
        return np.array(probs) / self.nshots

    def apply_task(self, gtask) -> None:
        self = gtask(self)
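A minimal usage sketch of the experiment loop (runs on the default qibo simulator backend; the depths, runs and shot count are arbitrary, and ``experiment_directory`` is assumed to return a writable directory path ending with a separator):

factory = SingleCliffordsFactory(nqubits=1, depths=[1, 5, 10], runs=3)
experiment = Experiment(factory, nshots=128)
experiment.prebuild()   # optional: freeze the random circuits into a list
experiment.execute()    # fills experiment.data with one {"samples": ...} row per circuit
probabilities = experiment.probabilities   # shape (runs * len(depths), 2 ** nqubits)
experiment.save()       # writes data.pkl (and circuits.pkl, since the factory was prebuilt)
restored = Experiment.load(experiment.path)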
class Result:
    """Once initialized with the correct parameters, a Result object can build
    reports to display the results of a randomized benchmarking experiment.
    """

    def __init__(self, dataframe: pd.DataFrame) -> None:
        self.df = dataframe
        self.all_figures = []

    def extract(self, group_by: str, output: str, agg_type: str):
        """Groups the dataframe by ``group_by``, aggregates the ``output``
        column with ``agg_type`` and returns both as arrays.

        Args:
            group_by (str): Column name to group the dataframe by.
            output (str): Column name of the values to aggregate.
            agg_type (str): Name of the aggregation, e.g. ``"mean"``.
        """
        grouped_df = self.df.groupby(group_by)[output].apply(agg_type)
        return np.array(grouped_df.index), np.array(grouped_df.values.tolist())

    def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):
        myfigs = []
        popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)
        fig = go.Scatter(
            x=xdata_scatter,
            y=ydata_scatter,
            line=dict(color="#6597aa"),
            mode="markers",
            marker={"opacity": 0.2, "symbol": "square"},
            name="runs",
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=xdata, y=ydata, line=dict(color="#aa6464"), mode="markers", name="average"
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=x_fit,
            y=y_fit,
            name="A: {:.3f}, p: {:.3f}, B: {:.3f}".format(popt[0], popt[1], popt[2]),
            line=go.scatter.Line(dash="dot"),
        )
        myfigs.append(fig)
        self.all_figures.append({"figs": myfigs})

    def report(self):
        from plotly.subplots import make_subplots

        l = len(self.all_figures)
        subplot_titles = [
            figdict.get("subplot_title", "") for figdict in self.all_figures
        ]
        fig = make_subplots(
            rows=l, cols=1 if l == 1 else 2, subplot_titles=subplot_titles
        )
        for count, fig_dict in enumerate(self.all_figures):
            plot_list = fig_dict["figs"]
            for plot in plot_list:
                fig.add_trace(plot, row=count // 2 + 1, col=count % 2 + 1)
        fig.update_xaxes(title_font_size=18, tickfont_size=16)
        fig.update_yaxes(title_font_size=18, tickfont_size=16)
        fig.update_layout(
            font_family="Averta",
            hoverlabel_font_family="Averta",
            title_text="Report",
            hoverlabel_font_size=16,
            showlegend=True,
            height=500 * max((l + 1) // 2, 1),
            width=1000,
        )
        return fig
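``scatter_fit_fig`` relies on a ``fitting_func`` attribute that is not defined in this excerpt. A hypothetical implementation for standard RB would fit the exponential decay A * p**m + B to the averaged survival probabilities and return the optimal parameters, their covariance and a dense curve for plotting (the helper name ``exp_decay_fit`` and the initial guess are made up here):

from scipy.optimize import curve_fit


def exp_decay_fit(xdata, ydata):
    """Fit y = A * p**m + B and return (popt, pcov, x_fit, y_fit)."""

    def model(m, A, p, B):
        return A * p**m + B

    popt, pcov = curve_fit(model, xdata, ydata, p0=[0.5, 0.9, 0.5], maxfev=5000)
    x_fit = np.linspace(np.min(xdata), np.max(xdata), 100)
    return popt, pcov, x_fit, model(x_fit, *popt)

An instance would then be wired up as ``result.fitting_func = exp_decay_fit`` before calling ``scatter_fit_fig``.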
def embed_unitary_circuit(circuit: Circuit, nqubits: int, support: list) -> Circuit:
    """Takes a circuit and redistributes the gates to the support of
    a new circuit with ``nqubits`` qubits.

    Args:
        circuit (Circuit): The circuit with ``len(support)`` many qubits.
        nqubits (int): Number of qubits of the new circuit.
        support (list): The qubits where the gates should be placed.

    Returns:
        Circuit: Circuit with redistributed gates.
    """
    idxmap = np.vectorize(lambda idx: support[idx])
    newcircuit = Circuit(nqubits)
    for gate in circuit.queue:
Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with 
open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe\n df[name] = datacolumn\n self.data = df.to_dict(\"records\")\n\n @property\n def samples(self) -> np.ndarray:\n \"\"\"Returns the samples from ``self.data`` in a 2d array.\n\n Returns:\n np.ndarray: 2d array of samples.\n \"\"\"\n\n try:","type":"infile"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) 
-> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different 
Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with 
open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe\n df[name] = datacolumn\n self.data = df.to_dict(\"records\")\n\n @property\n def samples(self) -> np.ndarray:\n \"\"\"Returns the samples from ``self.data`` in a 2d array.\n\n Returns:\n np.ndarray: 2d array of samples.\n \"\"\"\n\n try:\n return np.array(self.dataframe[\"samples\"].tolist())\n except KeyError:\n print(\"No samples here. Execute experiment first.\")\n return None\n\n @property\n def probabilities(self) -> np.ndarray:\n \"\"\"Takes the stored samples and returns probabilities for each\n possible state to occure.\n\n Returns:\n np.ndarray: Probability array of 2 dimension.\n \"\"\"\n\n allsamples = self.samples\n if allsamples is None:\n print(\"No probabilities either.\")\n return None\n # Create all possible state vectors.\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\n # Iterate over all the samples and count the different states.\n probs = [\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\n for samples in allsamples\n ]\n probs = np.array(probs) / (self.nshots)\n return probs\n\n def apply_task(self, gtask):\n self = gtask(self)\n\n\nclass Result:\n \"\"\"Once initialized with the correct parameters an Result object can build\n reports to display results of an randomized benchmarking experiment.\n \"\"\"\n\n def __init__(self, dataframe: pd.DataFrame) -> None:\n self.df = dataframe\n self.all_figures = []\n\n def extract(self, group_by: str, output: str, agg_type: str):\n \"\"\"Aggregates the dataframe, extracts the data by which the frame was\n grouped, what was calculated given the ``agg_type`` parameters.\n\n Args:\n group_by (str): _description_\n output (str): _description_\n agg_type (str): _description_\n \"\"\"\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\n \n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\n myfigs = []\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\n fig = go.Scatter(\n x=xdata_scatter,\n y=ydata_scatter,\n line=dict(color=\"#6597aa\"),\n mode=\"markers\",\n marker={\"opacity\": 0.2, \"symbol\": \"square\"},\n name=\"runs\",\n )\n myfigs.append(fig)\n fig = go.Scatter(\n x=xdata, y=ydata, line=dict(color=\"#aa6464\"), mode=\"markers\", name=\"average\"\n )\n myfigs.append(fig)\n fig = go.Scatter(\n x=x_fit,\n y=y_fit,","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding 
into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. 
Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, 
f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe\n df[name] = datacolumn\n self.data = df.to_dict(\"records\")\n\n @property\n def samples(self) -> np.ndarray:\n \"\"\"Returns the samples from ``self.data`` in a 2d array.\n\n Returns:\n np.ndarray: 2d array of samples.\n \"\"\"\n\n try:\n return np.array(self.dataframe[\"samples\"].tolist())\n except KeyError:\n print(\"No samples here. Execute experiment first.\")\n return None\n\n @property\n def probabilities(self) -> np.ndarray:\n \"\"\"Takes the stored samples and returns probabilities for each\n possible state to occure.\n\n Returns:\n np.ndarray: Probability array of 2 dimension.\n \"\"\"\n\n allsamples = self.samples\n if allsamples is None:\n print(\"No probabilities either.\")\n return None\n # Create all possible state vectors.\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\n # Iterate over all the samples and count the different states.\n probs = [\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\n for samples in allsamples\n ]\n probs = np.array(probs) / (self.nshots)\n return probs\n\n def apply_task(self, gtask):\n self = gtask(self)\n\n\nclass Result:\n \"\"\"Once initialized with the correct parameters an Result object can build\n reports to display results of an randomized benchmarking experiment.\n \"\"\"\n\n def __init__(self, dataframe: pd.DataFrame) -> None:\n self.df = dataframe\n self.all_figures = []\n\n def extract(self, group_by: str, output: str, agg_type: str):\n \"\"\"Aggregates the dataframe, extracts the data by which the frame was\n grouped, what was calculated given the ``agg_type`` parameters.\n\n Args:\n group_by (str): _description_\n output (str): _description_\n agg_type (str): _description_\n \"\"\"","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: 
int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is 
in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, 
depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n 
``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n 
circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise 
NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe\n df[name] = datacolumn","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def 
class Result:
    """Once initialized with the correct parameters, a Result object can build
    reports to display the results of a randomized benchmarking experiment.
    """

    def __init__(self, dataframe: pd.DataFrame) -> None:
        self.df = dataframe
        self.all_figures = []

    def extract(self, group_by: str, output: str, agg_type: str):
        """Aggregates the dataframe and returns the values by which the frame
        was grouped along with the aggregated output.

        Args:
            group_by (str): Name of the column to group the dataframe by.
            output (str): Name of the column which is aggregated.
            agg_type (str): Aggregation applied to each group, e.g. a callable
                such as ``np.mean``.
        """
        grouped_df = self.df.groupby(group_by)[output].apply(agg_type)
        return np.array(grouped_df.index), np.array(grouped_df.values.tolist())

    def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):
        myfigs = []
        popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)
        fig = go.Scatter(
            x=xdata_scatter,
            y=ydata_scatter,
            line=dict(color="#6597aa"),
            mode="markers",
            marker={"opacity": 0.2, "symbol": "square"},
            name="runs",
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=xdata, y=ydata, line=dict(color="#aa6464"), mode="markers", name="average"
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=x_fit,
            y=y_fit,
            name="A: {:.3f}, p: {:.3f}, B: {:.3f}".format(popt[0], popt[1], popt[2]),
            line=go.scatter.Line(dash="dot"),
        )
        myfigs.append(fig)
        self.all_figures.append({"figs": myfigs})

    def report(self):
        from plotly.subplots import make_subplots

        l = len(self.all_figures)
        subplot_titles = [figdict.get("subplot_title") for figdict in self.all_figures]
        # Two plots per row; a single entry gets its own full-width row.
        rows = max((l + 1) // 2, 1)
        fig = make_subplots(
            rows=rows, cols=1 if l == 1 else 2, subplot_titles=subplot_titles
        )
        for count, fig_dict in enumerate(self.all_figures):
            plot_list = fig_dict["figs"]
            for plot in plot_list:
                fig.add_trace(plot, row=count // 2 + 1, col=count % 2 + 1)
        fig.update_xaxes(title_font_size=18, tickfont_size=16)
        fig.update_yaxes(title_font_size=18, tickfont_size=16)
        fig.update_layout(
            font_family="Averta",
            hoverlabel_font_family="Averta",
            title_text="Report",
            hoverlabel_font_size=16,
            showlegend=True,
            height=500 * rows,
            width=1000,
        )
        return fig
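# ``Result.scatter_fit_fig`` calls ``self.fitting_func``, which is not defined
# in this module; a concrete Result subclass (or the caller) is expected to
# provide it. The helper below is only a sketch of what such a function could
# look like, assuming scipy is available and an exponential decay A * p**x + B.
def exp_decay_fit(xdata, ydata):
    """Illustrative fitting function: fit y = A * p**x + B and return the
    optimal parameters, their covariance and a dense curve for plotting."""
    from scipy.optimize import curve_fit  # assumed extra dependency

    def model(x, A, p, B):
        return A * p**x + B

    popt, pcov = curve_fit(model, xdata, ydata, p0=[0.5, 0.9, 0.5], maxfev=5000)
    x_fit = np.linspace(np.min(xdata), np.max(xdata), 100)
    y_fit = model(x_fit, *popt)
    return popt, pcov, x_fit, y_fit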
def embed_unitary_circuit(circuit: Circuit, nqubits: int, support: list) -> Circuit:
    """Takes a circuit and redistributes the gates to the support of
    a new circuit with ``nqubits`` qubits.

    Args:
        circuit (Circuit): The circuit with len(``support``) many qubits.
        nqubits (int): Number of qubits of the new circuit.
        support (list): The qubits where the gates should be placed.

    Returns:
        Circuit: Circuit with redistributed gates.
    """
    idxmap = np.vectorize(lambda idx: support[idx])
    newcircuit = Circuit(nqubits)
    for gate in circuit.queue:
        if not isinstance(gate, gates.measurements.M):
            newcircuit.add(
                gate.__class__(gate.init_args[0], *idxmap(np.array(gate.init_args[1:])))
            )
        else:
            # The original snippet is truncated here; a measurement gate
            # placed on the mapped support qubits is the assumed completion.
            newcircuit.add(gates.M(*idxmap(np.array(gate.target_qubits))))
    return newcircuit
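# Illustrative use of ``embed_unitary_circuit`` (hypothetical example; it also
# relies on the assumed completion of the measurement branch above): a
# single-qubit circuit is embedded into a 5-qubit circuit on qubit 3.
def _example_embedding():
    small = Circuit(1)
    small.add(gates.Unitary(np.eye(2), 0))
    small.add(gates.M(0))
    big = embed_unitary_circuit(small, nqubits=5, support=[3])
    # The gates now act on qubit 3 of the larger circuit.
    print(big.nqubits)  # 5
    return big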
import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. 
Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, 
f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe\n df[name] = datacolumn\n self.data = df.to_dict(\"records\")\n\n @property\n def samples(self) -> np.ndarray:\n \"\"\"Returns the samples from ``self.data`` in a 2d array.\n\n Returns:\n np.ndarray: 2d array of samples.\n \"\"\"\n\n try:\n return np.array(self.dataframe[\"samples\"].tolist())\n except KeyError:\n print(\"No samples here. Execute experiment first.\")\n return None\n\n @property\n def probabilities(self) -> np.ndarray:\n \"\"\"Takes the stored samples and returns probabilities for each\n possible state to occure.\n\n Returns:\n np.ndarray: Probability array of 2 dimension.\n \"\"\"\n\n allsamples = self.samples\n if allsamples is None:\n print(\"No probabilities either.\")\n return None\n # Create all possible state vectors.\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\n # Iterate over all the samples and count the different states.\n probs = [\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\n for samples in allsamples\n ]\n probs = np.array(probs) / (self.nshots)\n return probs\n\n def apply_task(self, gtask):\n self = gtask(self)\n\n\nclass Result:\n \"\"\"Once initialized with the correct parameters an Result object can build\n reports to display results of an randomized benchmarking experiment.\n \"\"\"\n\n def __init__(self, dataframe: pd.DataFrame) -> None:","type":"inproject"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n 
circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise 
    def single_task(self, circuit: Circuit, datarow: dict) -> dict:
        """Executes a circuit and returns the single shot results.

        Args:
            circuit (Circuit): Will be executed, has to return samples.
            datarow (dict): Dictionary with parameters for execution and
                immediate postprocessing information.
        """
        if self.__noise_model is not None:
            circuit = self.__noise_model.apply(circuit)
        samples = circuit(nshots=self.nshots).samples()
        return {"samples": samples}

    def save(self) -> None:
        """Creates a path and pickles the relevant data from ``self.data``
        and, if ``self.circuitfactory`` is a list, that one too.
        """
        self.path = experiment_directory("standardrb")
        if isinstance(self.circuitfactory, list):
            with open(f"{self.path}circuits.pkl", "wb") as f:
                pickle.dump(self.circuitfactory, f)
        with open(f"{self.path}data.pkl", "wb") as f:
            pickle.dump(self.data, f)

    @property
    def dataframe(self) -> pd.DataFrame:
        return pd.DataFrame(self.data)

    def _append_data(self, name: str, datacolumn: list) -> None:
        """Adds a data column to the ``data`` attribute.

        Args:
            name (str): Name of the data column.
            datacolumn (list): A list with one entry per data row.
        """
        if len(datacolumn) != len(self.data):
            raise ValueError("Given data column doesn't have the right length.")
        df = self.dataframe
        df[name] = datacolumn
        self.data = df.to_dict("records")

    @property
    def samples(self) -> np.ndarray:
        """Returns the samples from ``self.data`` as an array with one block
        of shots per circuit.

        Returns:
            np.ndarray: array of samples.
        """
        try:
            return np.array(self.dataframe["samples"].tolist())
        except KeyError:
            print("No samples here. Execute experiment first.")
            return None
    @property
    def probabilities(self) -> np.ndarray:
        """Takes the stored samples and returns the probability for each
        possible state to occur.

        Returns:
            np.ndarray: 2d probability array (circuits x states).
        """
        allsamples = self.samples
        if allsamples is None:
            print("No probabilities either.")
            return None
        # Create all possible state vectors.
        allstates = list(product([0, 1], repeat=len(allsamples[0][0])))
        # Iterate over all the samples and count the different states.
        probs = [
            [np.sum(np.prod(samples == state, axis=1)) for state in allstates]
            for samples in allsamples
        ]
        probs = np.array(probs) / self.nshots
        return probs

    def apply_task(self, gtask):
        self = gtask(self)


class Result:
    """Once initialized with the correct parameters, a Result object can
    build reports to display the results of a randomized benchmarking
    experiment.
    """

    def __init__(self, dataframe: pd.DataFrame) -> None:
        self.df = dataframe
        self.all_figures = []

    def extract(self, group_by: str, output: str, agg_type: str):
        """Aggregates the dataframe: groups it by the ``group_by`` column and
        applies ``agg_type`` to the ``output`` column of each group.

        Args:
            group_by (str): Name of the column used for grouping.
            output (str): Name of the column the aggregation is computed on.
            agg_type (str): Name of the aggregation function, e.g. ``"mean"``.

        Returns:
            tuple: the group keys and the aggregated values as arrays.
        """
        grouped_df = self.df.groupby(group_by)[output].apply(agg_type)
        return np.array(grouped_df.index), np.array(grouped_df.values.tolist())

    def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):
        myfigs = []
        popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)
        fig = go.Scatter(
            x=xdata_scatter,
            y=ydata_scatter,
            line=dict(color="#6597aa"),
            mode="markers",
            marker={"opacity": 0.2, "symbol": "square"},
            name="runs",
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=xdata, y=ydata, line=dict(color="#aa6464"), mode="markers", name="average"
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=x_fit,
            y=y_fit,
            name="A: {:.3f}, p: {:.3f}, B: {:.3f}".format(popt[0], popt[1], popt[2]),
            line=go.scatter.Line(dash="dot"),
        )
        myfigs.append(fig)
        self.all_figures.append({"figs": myfigs})

    def report(self):
        from plotly.subplots import make_subplots

        l = len(self.all_figures)
        subplot_titles = [figdict.get("subplot_title") for figdict in self.all_figures]
        fig = make_subplots(
            rows=l, cols=1 if l == 1 else 2, subplot_titles=subplot_titles
        )
        for count, fig_dict in enumerate(self.all_figures):
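
# Usage sketch (an illustration, not part of the module): a minimal
# end-to-end run with the classes above.  It assumes a qibo simulation
# backend is available; the depths, runs and shot count are arbitrary
# example values, not prescribed defaults.
factory = SingleCliffordsFactory(nqubits=1, depths=[1, 5, 10], runs=3)
experiment = Experiment(factory, nshots=128)
experiment.prebuild()              # freeze the random circuits into a list
experiment.execute()               # one {"samples": ...} row per circuit
probs = experiment.probabilities   # shape (runs * len(depths), 2**nqubits)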
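
# Sketch of the qubit embedding performed in ``Circuitfactory.__next__``:
# circuits are built on ``len(qubits)`` qubits and then placed onto the
# chosen wires of an ``nqubits``-wide circuit via ``on_qubits``.  The
# values below are illustrative.
embedded_factory = SingleCliffordsFactory(nqubits=3, depths=[2], runs=1, qubits=[2])
for embedded_circuit in embedded_factory:
    # Each yielded circuit spans the full three-qubit register, with the
    # random Cliffords and the measurement acting only on qubit 2.
    print(embedded_circuit.nqubits)
    print(embedded_circuit.draw())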
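
# Sanity sketch for ``clifford_unitary``: with a normalised rotation axis
# the returned matrix is unitary.  The angle and axis here are arbitrary
# test values, not entries of ONEQUBIT_CLIFFORD_PARAMS.
u = SingleCliffordsFactory(1, [1], 1).clifford_unitary(np.pi / 2, 0.0, 0.0, 1.0)
assert np.allclose(u @ u.conj().T, np.eye(2))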
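
# The state counting inside ``probabilities`` in isolation: two "circuits"
# with four single-qubit shots each, built by hand so the expected result
# is obvious.
toy_samples = np.array([[[0], [0], [1], [0]], [[1], [1], [0], [1]]])
toy_states = list(product([0, 1], repeat=toy_samples.shape[-1]))
toy_probs = np.array(
    [
        [np.sum(np.prod(samples == state, axis=1)) for state in toy_states]
        for samples in toy_samples
    ]
) / toy_samples.shape[1]
print(toy_probs)  # [[0.75 0.25] [0.25 0.75]]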
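
# Sketch of a save/load round trip, continuing the run from the first
# usage sketch above.  It assumes ``experiment_directory`` (from
# qibocal.calibrations.protocols.utils) returns a writable directory path
# ending in a separator, since ``save`` concatenates "data.pkl" directly
# onto it.
experiment.save()                            # writes circuits.pkl and data.pkl
restored = Experiment.load(experiment.path)
assert len(restored.data) == len(experiment.data)
assert restored.nshots == experiment.nshots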
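
# ``Result.extract`` is a thin wrapper around pandas groupby/apply; a toy
# frame makes its two return values concrete.  The column names are made
# up for illustration.  Note that ``scatter_fit_fig`` additionally expects
# a ``fitting_func`` attribute (returning popt, pcov, x_fit, y_fit) to be
# attached by the concrete protocol; it is not defined on ``Result``.
toy_df = pd.DataFrame({"depth": [1, 1, 5, 5], "ground_prob": [0.9, 0.8, 0.6, 0.5]})
depths, means = Result(toy_df).extract("depth", "ground_prob", "mean")
print(depths)  # [1 5]
print(means)   # [0.85 0.55]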
Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:","type":"commited"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n 
circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"","type":"non_informative"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, 
qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general","type":"non_informative"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy 
import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):","type":"non_informative"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = 
self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. 
Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, 
f)\n\n @property\n def dataframe(self) -> pd.DataFrame:","type":"random"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0","type":"random"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n 
def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n 
def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, f)\n\n @property\n def dataframe(self) -> pd.DataFrame:\n return pd.DataFrame(self.data)\n\n def _append_data(self, name: str, datacolumn: list) -> None:\n \"\"\"Adds data column to ``data`` attribute.\n\n Args:\n name (str): Name of data column.\n datacolumn (list): A list of the right shape\n \"\"\"\n if len(datacolumn) != len(self.data):\n raise ValueError(\"Given data column doesn't have the right length.\")\n df = self.dataframe\n df[name] = datacolumn\n self.data = df.to_dict(\"records\")\n\n @property\n def samples(self) -> np.ndarray:\n \"\"\"Returns the samples from ``self.data`` in a 2d array.\n\n Returns:\n np.ndarray: 2d array of samples.\n \"\"\"\n\n try:\n return np.array(self.dataframe[\"samples\"].tolist())\n except KeyError:\n print(\"No samples here. Execute experiment first.\")\n return None\n\n @property\n def probabilities(self) -> np.ndarray:\n \"\"\"Takes the stored samples and returns probabilities for each\n possible state to occure.\n\n Returns:\n np.ndarray: Probability array of 2 dimension.\n \"\"\"\n\n allsamples = self.samples\n if allsamples is None:\n print(\"No probabilities either.\")\n return None\n # Create all possible state vectors.\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\n # Iterate over all the samples and count the different states.\n probs = [\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\n for samples in allsamples\n ]\n probs = np.array(probs) / (self.nshots)\n return probs\n\n def apply_task(self, gtask):\n self = gtask(self)\n\n\nclass Result:\n \"\"\"Once initialized with the correct parameters an Result object can build\n reports to display results of an randomized benchmarking experiment.\n \"\"\"\n\n def __init__(self, dataframe: pd.DataFrame) -> None:\n self.df = dataframe\n self.all_figures = []\n\n def extract(self, group_by: str, output: str, agg_type: str):\n \"\"\"Aggregates the dataframe, extracts the data by which the frame was\n grouped, what was calculated given the ``agg_type`` parameters.\n\n Args:\n group_by (str): _description_\n output (str): _description_\n agg_type (str): _description_\n \"\"\"\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\n \n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\n myfigs = []\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\n fig = go.Scatter(\n x=xdata_scatter,\n y=ydata_scatter,\n line=dict(color=\"#6597aa\"),\n mode=\"markers\",\n marker={\"opacity\": 0.2, \"symbol\": \"square\"},\n name=\"runs\",\n )\n myfigs.append(fig)\n fig = go.Scatter(\n x=xdata, y=ydata, line=dict(color=\"#aa6464\"), mode=\"markers\", name=\"average\"\n )\n myfigs.append(fig)\n fig = go.Scatter(\n x=x_fit,\n y=y_fit,\n name=\"A: {:.3f}, p: {:.3f}, B: {:.3f}\".format(popt[0], popt[1], popt[2]),\n line=go.scatter.Line(dash=\"dot\"),\n )\n myfigs.append(fig)\n self.all_figures.append({'figs' : myfigs})\n\n def report(self):\n from plotly.subplots import make_subplots\n\n l = len(self.all_figures)\n subplot_titles = 
[figdict.get('subplot_title') for figdict in self.all_figures]\n fig = make_subplots(\n rows=l, cols=1 if len == 1 else 2,\n subplot_titles = subplot_titles)\n for count, fig_dict in enumerate(self.all_figures):","type":"random"},{"content":"from __future__ import annotations\n\nimport pickle\nfrom collections.abc import Iterable\nfrom copy import deepcopy\nfrom itertools import product\nfrom os.path import isfile\nimport plotly.graph_objects as go\n\nimport numpy as np\nimport pandas as pd\nfrom qibo import gates\nfrom qibo.models import Circuit\nfrom qibo.noise import NoiseModel\n\nfrom qibocal.calibrations.protocols.utils import (\n ONEQUBIT_CLIFFORD_PARAMS,\n experiment_directory,\n)\n\n\nclass Circuitfactory:\n \"\"\"TODO write documentation\n TODO make the embedding into lager qubit space possible\"\"\"\n\n def __init__(\n self, nqubits: int, depths: list, runs: int, qubits: list = None\n ) -> None:\n self.nqubits = nqubits if nqubits is not None else len(qubits)\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\n self.depths = depths\n self.runs = runs\n\n def __len__(self):\n return self.runs * len(self.depths)\n\n def __iter__(self) -> None:\n self.n = 0\n return self\n\n def __next__(self) -> None:\n if self.n >= self.runs * len(self.depths):\n raise StopIteration\n else:\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\n self.n += 1\n # Distribute the circuit onto the given support.\n bigcircuit = Circuit(self.nqubits)\n bigcircuit.add(circuit.on_qubits(*self.qubits))\n return bigcircuit\n\n def build_circuit(self, depth: int):\n raise NotImplementedError\n\n\nclass SingleCliffordsFactory(Circuitfactory):\n def __init__(\n self, nqubits: list, depths: list, runs: int, qubits: list = None\n ) -> None:\n super().__init__(nqubits, depths, runs, qubits)\n\n def build_circuit(self, depth: int):\n circuit = Circuit(len(self.qubits))\n for _ in range(depth):\n circuit.add(self.gates())\n circuit.add(gates.M(*range(len(self.qubits))))\n return circuit\n\n def clifford_unitary(\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\n ) -> np.ndarray:\n \"\"\"Four given parameters are used to build one Clifford unitary.\n\n Args:\n theta (float) : An angle\n nx (float) : prefactor\n ny (float) : prefactor\n nz (float) : prefactor\n\n Returns:\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\n \"\"\"\n matrix = np.array(\n [\n [\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n ],\n [\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\n ],\n ]\n )\n return matrix\n\n def gates(self) -> list(gates.Unitary):\n \"\"\"Draws the parameters and builds the unitary Clifford gates for\n a circuit layer.\n\n Returns:\n list filled with ``qibo.gates.Unitary``:\n the simulatanous Clifford gates.\n \"\"\"\n # There are this many different Clifford matrices.\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\n gates_list = []\n # Choose as many random integers between 0 and 23 as there are used\n # qubits. 
Get the clifford parameters and build the unitares.\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\n # Build the random Clifford matrices append them\n gates_list.append(\n gates.Unitary(\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\n )\n )\n # Make a unitary gate out of 'unitary' for the qubits.\n return gates_list\n \n\nclass Experiment:\n \"\"\"Experiment objects which holds an iterable circuit factory along with\n a simple data structure associated to each circuit.\n\n Args:\n circuitfactory (Iterable): Gives a certain amount of circuits when\n iterated over.\n data (list): If filled ``data`` can be used to specifying parameters\n while executing a circuit or deciding how to process results.\n nshots (int): For execution of circuit, indicates how many shots.\n \"\"\"\n\n def __init__(\n self,\n circuitfactory: Iterable,\n nshots: int = None,\n data: list = None,\n noisemodel: NoiseModel = None,\n ) -> None:\n \"\"\" \"\"\"\n self.circuitfactory = circuitfactory\n self.nshots = nshots\n self.data = data\n self.__noise_model = noisemodel\n\n @classmethod\n def load(cls, path: str) -> Experiment:\n \"\"\"Creates an object with data and if possible with circuits.\n\n Args:\n path (str): The directory from where the object should be restored.\n\n Returns:\n Experiment: The object with data (and circuitfactory).\n \"\"\"\n datapath = f\"{path}data.pkl\"\n circuitspath = f\"{path}circuits.pkl\"\n if isfile(datapath):\n with open(datapath, \"rb\") as f:\n data = pickle.load(f)\n if isinstance(data, pd.DataFrame):\n data = data.to_dict(\"records\")\n nshots = len(data[0][\"samples\"])\n else:\n data = None\n if isfile(circuitspath):\n with open(circuitspath, \"rb\") as f:\n circuitfactory = pickle.load(f)\n else:\n circuitfactory = None\n # Initiate an instance of the experiment class.\n obj = cls(circuitfactory, data=data, nshots=nshots)\n return obj\n\n def prebuild(self) -> None:\n \"\"\"Converts the attribute ``circuitfactory`` which is in general\n an iterable into a list.\n \"\"\"\n self.circuitfactory = list(self.circuitfactory)\n\n def execute(self) -> None:\n \"\"\"Calls method ``single_task`` while iterating over attribute\n ``circuitfactory```.\n\n Collects data given the already set data and overwrites\n attribute ``data``.\n \"\"\"\n if self.circuitfactory is None:\n raise NotImplementedError(\"There are no circuits to execute.\")\n newdata = []\n for circuit in self.circuitfactory:\n try:\n datarow = next(self.data)\n except TypeError:\n datarow = {}\n newdata.append(self.single_task(deepcopy(circuit), datarow))\n self.data = newdata\n\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\n \"\"\"Executes a circuit, returns the single shot results.\n\n Args:\n circuit (Circuit): Will be executed, has to return samples.\n datarow (dict): Dictionary with parameters for execution and\n immediate postprocessing information.\n \"\"\"\n if self.__noise_model is not None:\n circuit = self.__noise_model.apply(circuit)\n samples = circuit(nshots=self.nshots).samples()\n return {\"samples\": samples}\n\n def save(self) -> None:\n \"\"\"Creates a path and pickles relevent data from ``self.data`` and\n if ``self.circuitfactory`` is a list that one too.\n \"\"\"\n self.path = experiment_directory(\"standardrb\")\n if isinstance(self.circuitfactory, list):\n with open(f\"{self.path}circuits.pkl\", \"wb\") as f:\n pickle.dump(self.circuitfactory, f)\n with open(f\"{self.path}data.pkl\", \"wb\") as f:\n pickle.dump(self.data, 
class Result:
    """Once initialized with the correct parameters, a Result object can build
    reports to display the results of a randomized benchmarking experiment.
    """

    def __init__(self, dataframe: pd.DataFrame) -> None:
        self.df = dataframe
        self.all_figures = []

    def extract(self, group_by: str, output: str, agg_type: str):
        """Aggregates the dataframe, extracts the data by which the frame was
        grouped and what was calculated given the ``agg_type`` parameter.

        Args:
            group_by (str): Name of the column to group the dataframe by.
            output (str): Name of the column that is aggregated.
            agg_type (str): Aggregation function applied to each group.
        """
        grouped_df = self.df.groupby(group_by)[output].apply(agg_type)
        return np.array(grouped_df.index), np.array(grouped_df.values.tolist())

    def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):
        myfigs = []
        popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)
        fig = go.Scatter(
            x=xdata_scatter,
            y=ydata_scatter,
            line=dict(color="#6597aa"),
            mode="markers",
            marker={"opacity": 0.2, "symbol": "square"},
            name="runs",
        )
        myfigs.append(fig)
        fig = go.Scatter(
            x=xdata, y=ydata, line=dict(color="#aa6464"), mode="markers", name="average"
        )
experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\\n \\n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\\n myfigs = []\\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\\n fig = go.Scatter(\\n x=xdata_scatter,\\n y=ydata_scatter,\\n line=dict(color=\\\"#6597aa\\\"),\\n mode=\\\"markers\\\",\\n marker={\\\"opacity\\\": 0.2, \\\"symbol\\\": \\\"square\\\"},\\n name=\\\"runs\\\",\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=xdata, y=ydata, line=dict(color=\\\"#aa6464\\\"), mode=\\\"markers\\\", name=\\\"average\\\"\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=x_fit,\\n y=y_fit,\\n name=\\\"A: {:.3f}, p: {:.3f}, B: {:.3f}\\\".format(popt[0], popt[1], popt[2]),\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n )\\n myfigs.append(fig)\\n self.all_figures.append({'figs' : myfigs})\\n\\n def report(self):\\n from plotly.subplots 
import make_subplots\\n\\n l = len(self.all_figures)\\n subplot_titles = [figdict.get('subplot_title') for figdict in self.all_figures]\\n fig = make_subplots(\\n rows=l, cols=1 if len == 1 else 2,\\n subplot_titles = subplot_titles)\\n for count, fig_dict in enumerate(self.all_figures):\\n plot_list = fig_dict['figs']\\n for plot in plot_list:\\n fig.add_trace(plot, row=count//2 + 1, col = count%2+1)\\n fig.update_xaxes(title_font_size=18, tickfont_size=16)\\n fig.update_yaxes(title_font_size=18, tickfont_size=16)\\n fig.update_layout(\\n font_family=\\\"Averta\\\",\\n hoverlabel_font_family=\\\"Averta\\\",\\n title_text=\\\"Report\\\",\\n hoverlabel_font_size=16,\\n showlegend=True,\\n height=500 * int(l/2),\\n width=1000,\\n )\\n return fig\\n\\n\\ndef embed_unitary_circuit(circuit: Circuit, nqubits: int, support: list) -> Circuit:\\n \\\"\\\"\\\"Takes a circuit and redistributes the gates to the support of\\n a new circuit with ``nqubits`` qubits.\\n\\n Args:\\n circuit (Circuit): The circuit with len(``support``) many qubits.\\n nqubits (int): Qubits of new circuit.\\n support (list): The qubits were the gates should be places.\\n\\n Returns:\\n Circuit: Circuit with redistributed gates.\\n \\\"\\\"\\\"\\n\\n idxmap = np.vectorize(lambda idx: support[idx])\\n newcircuit = Circuit(nqubits)\\n for gate in circuit.queue:\",\n \"type\": \"infile\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : 
prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n 
try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\",\n \"type\": \"infile\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n 
super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass 
SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n 
obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. 
Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\\n \\n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\\n myfigs = []\\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\\n fig = go.Scatter(\\n x=xdata_scatter,\\n y=ydata_scatter,\\n line=dict(color=\\\"#6597aa\\\"),\\n mode=\\\"markers\\\",\\n marker={\\\"opacity\\\": 0.2, \\\"symbol\\\": \\\"square\\\"},\\n name=\\\"runs\\\",\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=xdata, y=ydata, line=dict(color=\\\"#aa6464\\\"), mode=\\\"markers\\\", name=\\\"average\\\"\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=x_fit,\\n y=y_fit,\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = 
Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if 
isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. 
Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used 
to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise 
NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n 
circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> 
None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n 
def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, 
nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # 
Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = 
experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\\n \\n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\\n myfigs = []\\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\\n fig = go.Scatter(\\n x=xdata_scatter,\\n y=ydata_scatter,\\n line=dict(color=\\\"#6597aa\\\"),\\n mode=\\\"markers\\\",\\n marker={\\\"opacity\\\": 0.2, \\\"symbol\\\": \\\"square\\\"},\\n name=\\\"runs\\\",\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=xdata, y=ydata, line=dict(color=\\\"#aa6464\\\"), mode=\\\"markers\\\", name=\\\"average\\\"\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=x_fit,\\n y=y_fit,\\n name=\\\"A: {:.3f}, p: {:.3f}, B: {:.3f}\\\".format(popt[0], popt[1], popt[2]),\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n )\\n myfigs.append(fig)\\n self.all_figures.append({'figs' : myfigs})\\n\\n def report(self):\\n from plotly.subplots 
import make_subplots\\n\\n l = len(self.all_figures)\\n subplot_titles = [figdict.get('subplot_title') for figdict in self.all_figures]\\n fig = make_subplots(\\n rows=l, cols=1 if l == 1 else 2,\\n subplot_titles=subplot_titles)\\n for count, fig_dict in enumerate(self.all_figures):\\n plot_list = fig_dict['figs']\\n for plot in plot_list:\\n fig.add_trace(plot, row=count//2 + 1, col=count%2+1)\\n fig.update_xaxes(title_font_size=18, tickfont_size=16)\\n fig.update_yaxes(title_font_size=18, tickfont_size=16)\\n fig.update_layout(\\n font_family=\\\"Averta\\\",\\n hoverlabel_font_family=\\\"Averta\\\",\\n title_text=\\\"Report\\\",\\n hoverlabel_font_size=16,\\n showlegend=True,\\n height=500 * max(int(l / 2), 1),\\n width=1000,\\n )\\n return fig\\n\\n\\ndef embed_unitary_circuit(circuit: Circuit, nqubits: int, support: list) -> Circuit:\\n \\\"\\\"\\\"Takes a circuit and redistributes the gates to the support of\\n a new circuit with ``nqubits`` qubits.\\n\\n Args:\\n circuit (Circuit): The circuit with len(``support``) many qubits.\\n nqubits (int): Qubits of new circuit.\\n support (list): The qubits where the gates should be placed.\\n\\n Returns:\\n Circuit: Circuit with redistributed gates.\\n \\\"\\\"\\\"\\n\\n idxmap = np.vectorize(lambda idx: support[idx])\\n newcircuit = Circuit(nqubits)\\n for gate in circuit.queue:\\n if not isinstance(gate, gates.measurements.M):\\n newcircuit.add(\\n gate.__class__(gate.init_args[0], *idxmap(np.array(gate.init_args[1:])))\\n )\\n else:\",\n \"type\": \"inproject\"\n },
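A minimal usage sketch of the factory and experiment classes collected in the snippet above; it assumes qibo and the qibocal utilities (ONEQUBIT_CLIFFORD_PARAMS, experiment_directory) are importable, and the depths, runs and nshots values are illustrative only:

    # Hypothetical parameters: three depths, two runs each, 128 shots per circuit.
    factory = SingleCliffordsFactory(nqubits=1, depths=[1, 3, 5], runs=2, qubits=[0])
    experiment = Experiment(factory, nshots=128)
    experiment.prebuild()              # freeze the factory into a list so it can also be pickled
    experiment.execute()               # fills experiment.data with one {"samples": ...} row per circuit
    probs = experiment.probabilities   # array of shape (runs * len(depths), 2 ** len(qubits))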
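A sketch of the save/load round trip the Experiment class defines; it assumes experiment_directory returns a writable directory path (with trailing separator, as the f-strings above expect) and that the experiment was executed first, so nshots can be recovered from the pickled samples:

    experiment.save()                           # writes data.pkl (and circuits.pkl if the factory was prebuilt)
    restored = Experiment.load(experiment.path)
    print(restored.dataframe.head())            # one row per executed circuit, with a "samples" column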
\\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\",\n \"type\": \"inproject\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given 
support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data 
= None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. 
Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\\n \\n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\\n myfigs = []\\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\\n fig = go.Scatter(\\n x=xdata_scatter,\\n y=ydata_scatter,\\n line=dict(color=\\\"#6597aa\\\"),\\n mode=\\\"markers\\\",\\n marker={\\\"opacity\\\": 0.2, \\\"symbol\\\": \\\"square\\\"},\\n name=\\\"runs\\\",\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=xdata, y=ydata, line=dict(color=\\\"#aa6464\\\"), mode=\\\"markers\\\", name=\\\"average\\\"\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=x_fit,\\n y=y_fit,\\n name=\\\"A: {:.3f}, p: {:.3f}, B: {:.3f}\\\".format(popt[0], popt[1], popt[2]),\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n )\\n myfigs.append(fig)\\n self.all_figures.append({'figs' : myfigs})\\n\\n def report(self):\\n from plotly.subplots import make_subplots\\n\\n l = len(self.all_figures)\",\n \"type\": \"common\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 
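The classes above compose into a small randomized-benchmarking workflow: a circuit factory generates the random Clifford circuits, an Experiment executes them and stores the shot samples, and a Result aggregates the outcomes. The following is a minimal usage sketch, assuming a working qibo simulation backend; the depths, number of runs and shot count are illustrative values, and the "groundstate_prob" and "depth" column names are choices made here, not part of the module.

import numpy as np

# Two runs of single-qubit Clifford circuits at three depths each.
factory = SingleCliffordsFactory(nqubits=1, depths=[1, 5, 10], runs=2)

experiment = Experiment(factory, nshots=128)
experiment.prebuild()  # materialize the circuits so they can be pickled later
experiment.execute()   # fills experiment.data with {"samples": ...} rows

# Probability of measuring |0> for each circuit, plus each circuit's depth.
experiment._append_data("groundstate_prob", experiment.probabilities[:, 0].tolist())
experiment._append_data("depth", factory.depths * factory.runs)

# Average the ground-state probability over the runs for every depth.
result = Result(experiment.dataframe)
depths_axis, mean_survival = result.extract("depth", "groundstate_prob", "mean")
print(depths_axis, mean_survival)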
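``Result.scatter_fit_fig`` relies on a ``fitting_func`` attribute that is not defined in the excerpt above. As a hedged sketch only: for the standard randomized-benchmarking decay A·p^m + B, such a hook could be implemented with ``scipy.optimize.curve_fit`` as below; the function name, initial guess and evaluation grid are assumptions, not the module's actual implementation.

import numpy as np
from scipy.optimize import curve_fit


def exp_decay(m, A, p, B):
    # Standard RB survival model: A * p**m + B.
    return A * p**m + B


def fitting_func(self, xdata, ydata):
    # Hypothetical hook used by ``Result.scatter_fit_fig``; returns the fit
    # parameters, their covariance, and a smooth curve for plotting.
    popt, pcov = curve_fit(exp_decay, xdata, ydata, p0=[0.5, 0.9, 0.5], maxfev=5000)
    x_fit = np.linspace(np.min(xdata), np.max(xdata), 100)
    y_fit = exp_decay(x_fit, *popt)
    return popt, pcov, x_fit, y_fit


# Illustrative only: attach the hook so scatter_fit_fig can find it.
Result.fitting_func = fitting_func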
0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = 
experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\",\n \"type\": \"common\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in 
range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\",\n \"type\": \"commited\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\",\n \"type\": \"commited\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport 
plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\",\n \"type\": \"commited\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n 
super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % 
len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = 
Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\",\n \"type\": \"non_informative\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : 
prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n 
try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\",\n \"type\": \"random\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\",\n \"type\": \"random\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # 
Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = 
experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\\n \\n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\\n myfigs = []\\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\\n fig = go.Scatter(\\n x=xdata_scatter,\\n y=ydata_scatter,\\n line=dict(color=\\\"#6597aa\\\"),\\n mode=\\\"markers\\\",\\n marker={\\\"opacity\\\": 0.2, \\\"symbol\\\": \\\"square\\\"},\\n name=\\\"runs\\\",\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=xdata, y=ydata, line=dict(color=\\\"#aa6464\\\"), mode=\\\"markers\\\", name=\\\"average\\\"\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=x_fit,\\n y=y_fit,\\n name=\\\"A: {:.3f}, p: {:.3f}, B: {:.3f}\\\".format(popt[0], popt[1], popt[2]),\\n line=go.scatter.Line(dash=\\\"dot\\\"),\\n )\\n myfigs.append(fig)\\n self.all_figures.append({'figs' : myfigs})\\n\\n def report(self):\\n from plotly.subplots 
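# --- Illustrative sketch (editor's addition, not part of the dumped module) ---
# The ``probabilities`` property above turns stored shot samples into per-state
# frequencies by comparing every shot against each computational basis state.
# A numpy-only miniature of that counting for a single circuit, assuming a
# (nshots x nqubits) array of 0/1 outcomes:
import numpy as np
from itertools import product

shots = np.array([[0, 0], [0, 0], [1, 1], [0, 1]])        # 4 shots, 2 qubits
states = list(product([0, 1], repeat=shots.shape[1]))      # 00, 01, 10, 11
probs = np.array(
    [np.sum(np.all(shots == state, axis=1)) for state in states]
) / len(shots)
assert probs.tolist() == [0.5, 0.25, 0.0, 0.25]
# --- end of editor's sketch ----------------------------------------------------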
import make_subplots\\n\\n l = len(self.all_figures)\\n subplot_titles = [figdict.get('subplot_title') for figdict in self.all_figures]\\n fig = make_subplots(\\n rows=l, cols=1 if len == 1 else 2,\\n subplot_titles = subplot_titles)\\n for count, fig_dict in enumerate(self.all_figures):\",\n \"type\": \"random\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\\n return circuit\\n\\n def clifford_unitary(\\n self, theta: float = 0, nx: float = 0, ny: float = 0, nz: float = 0\\n ) -> np.ndarray:\\n \\\"\\\"\\\"Four given parameters are used to build one Clifford unitary.\\n\\n Args:\\n theta (float) : An angle\\n nx (float) : prefactor\\n ny (float) : prefactor\\n nz (float) : prefactor\\n\\n Returns:\\n ``qibo.gates.Unitary`` with the drawn matrix as unitary.\\n \\\"\\\"\\\"\\n matrix = np.array(\\n [\\n [\\n np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\\n -ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n ],\\n [\\n ny * np.sin(theta / 2) - 1.0j * nx * np.sin(theta / 2),\\n np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\\n ],\\n ]\\n )\\n return matrix\\n\\n def gates(self) -> list(gates.Unitary):\\n \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\\n a circuit layer.\\n\\n Returns:\\n list filled with ``qibo.gates.Unitary``:\\n the simulatanous Clifford gates.\\n \\\"\\\"\\\"\\n # There are this many different Clifford matrices.\\n amount = len(ONEQUBIT_CLIFFORD_PARAMS)\\n gates_list = []\\n # Choose as many random integers between 0 and 23 as there are used\\n # qubits. 
Get the clifford parameters and build the unitares.\\n for count, rint in enumerate(np.random.randint(0, amount, size=len(self.qubits))):\\n # Build the random Clifford matrices append them\\n gates_list.append(\\n gates.Unitary(\\n self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\\n )\\n )\\n # Make a unitary gate out of 'unitary' for the qubits.\\n return gates_list\\n \\n\\nclass Experiment:\\n \\\"\\\"\\\"Experiment objects which holds an iterable circuit factory along with\\n a simple data structure associated to each circuit.\\n\\n Args:\\n circuitfactory (Iterable): Gives a certain amount of circuits when\\n iterated over.\\n data (list): If filled ``data`` can be used to specifying parameters\\n while executing a circuit or deciding how to process results.\\n nshots (int): For execution of circuit, indicates how many shots.\\n \\\"\\\"\\\"\\n\\n def __init__(\\n self,\\n circuitfactory: Iterable,\\n nshots: int = None,\\n data: list = None,\\n noisemodel: NoiseModel = None,\\n ) -> None:\\n \\\"\\\"\\\" \\\"\\\"\\\"\\n self.circuitfactory = circuitfactory\\n self.nshots = nshots\\n self.data = data\\n self.__noise_model = noisemodel\\n\\n @classmethod\\n def load(cls, path: str) -> Experiment:\\n \\\"\\\"\\\"Creates an object with data and if possible with circuits.\\n\\n Args:\\n path (str): The directory from where the object should be restored.\\n\\n Returns:\\n Experiment: The object with data (and circuitfactory).\\n \\\"\\\"\\\"\\n datapath = f\\\"{path}data.pkl\\\"\\n circuitspath = f\\\"{path}circuits.pkl\\\"\\n if isfile(datapath):\\n with open(datapath, \\\"rb\\\") as f:\\n data = pickle.load(f)\\n if isinstance(data, pd.DataFrame):\\n data = data.to_dict(\\\"records\\\")\\n nshots = len(data[0][\\\"samples\\\"])\\n else:\\n data = None\\n if isfile(circuitspath):\\n with open(circuitspath, \\\"rb\\\") as f:\\n circuitfactory = pickle.load(f)\\n else:\\n circuitfactory = None\\n # Initiate an instance of the experiment class.\\n obj = cls(circuitfactory, data=data, nshots=nshots)\\n return obj\\n\\n def prebuild(self) -> None:\\n \\\"\\\"\\\"Converts the attribute ``circuitfactory`` which is in general\\n an iterable into a list.\\n \\\"\\\"\\\"\\n self.circuitfactory = list(self.circuitfactory)\\n\\n def execute(self) -> None:\\n \\\"\\\"\\\"Calls method ``single_task`` while iterating over attribute\\n ``circuitfactory```.\\n\\n Collects data given the already set data and overwrites\\n attribute ``data``.\\n \\\"\\\"\\\"\\n if self.circuitfactory is None:\\n raise NotImplementedError(\\\"There are no circuits to execute.\\\")\\n newdata = []\\n for circuit in self.circuitfactory:\\n try:\\n datarow = next(self.data)\\n except TypeError:\\n datarow = {}\\n newdata.append(self.single_task(deepcopy(circuit), datarow))\\n self.data = newdata\\n\\n def single_task(self, circuit: Circuit, datarow: dict) -> None:\\n \\\"\\\"\\\"Executes a circuit, returns the single shot results.\\n\\n Args:\\n circuit (Circuit): Will be executed, has to return samples.\\n datarow (dict): Dictionary with parameters for execution and\\n immediate postprocessing information.\\n \\\"\\\"\\\"\\n if self.__noise_model is not None:\\n circuit = self.__noise_model.apply(circuit)\\n samples = circuit(nshots=self.nshots).samples()\\n return {\\\"samples\\\": samples}\\n\\n def save(self) -> None:\\n \\\"\\\"\\\"Creates a path and pickles relevent data from ``self.data`` and\\n if ``self.circuitfactory`` is a list that one too.\\n \\\"\\\"\\\"\\n self.path = 
experiment_directory(\\\"standardrb\\\")\\n if isinstance(self.circuitfactory, list):\\n with open(f\\\"{self.path}circuits.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.circuitfactory, f)\\n with open(f\\\"{self.path}data.pkl\\\", \\\"wb\\\") as f:\\n pickle.dump(self.data, f)\\n\\n @property\\n def dataframe(self) -> pd.DataFrame:\\n return pd.DataFrame(self.data)\\n\\n def _append_data(self, name: str, datacolumn: list) -> None:\\n \\\"\\\"\\\"Adds data column to ``data`` attribute.\\n\\n Args:\\n name (str): Name of data column.\\n datacolumn (list): A list of the right shape\\n \\\"\\\"\\\"\\n if len(datacolumn) != len(self.data):\\n raise ValueError(\\\"Given data column doesn't have the right length.\\\")\\n df = self.dataframe\\n df[name] = datacolumn\\n self.data = df.to_dict(\\\"records\\\")\\n\\n @property\\n def samples(self) -> np.ndarray:\\n \\\"\\\"\\\"Returns the samples from ``self.data`` in a 2d array.\\n\\n Returns:\\n np.ndarray: 2d array of samples.\\n \\\"\\\"\\\"\\n\\n try:\\n return np.array(self.dataframe[\\\"samples\\\"].tolist())\\n except KeyError:\\n print(\\\"No samples here. Execute experiment first.\\\")\\n return None\\n\\n @property\\n def probabilities(self) -> np.ndarray:\\n \\\"\\\"\\\"Takes the stored samples and returns probabilities for each\\n possible state to occure.\\n\\n Returns:\\n np.ndarray: Probability array of 2 dimension.\\n \\\"\\\"\\\"\\n\\n allsamples = self.samples\\n if allsamples is None:\\n print(\\\"No probabilities either.\\\")\\n return None\\n # Create all possible state vectors.\\n allstates = list(product([0, 1], repeat=len(allsamples[0][0])))\\n # Iterate over all the samples and count the different states.\\n probs = [\\n [np.sum(np.product(samples == state, axis=1)) for state in allstates]\\n for samples in allsamples\\n ]\\n probs = np.array(probs) / (self.nshots)\\n return probs\\n\\n def apply_task(self, gtask):\\n self = gtask(self)\\n\\n\\nclass Result:\\n \\\"\\\"\\\"Once initialized with the correct parameters an Result object can build\\n reports to display results of an randomized benchmarking experiment.\\n \\\"\\\"\\\"\\n\\n def __init__(self, dataframe: pd.DataFrame) -> None:\\n self.df = dataframe\\n self.all_figures = []\\n\\n def extract(self, group_by: str, output: str, agg_type: str):\\n \\\"\\\"\\\"Aggregates the dataframe, extracts the data by which the frame was\\n grouped, what was calculated given the ``agg_type`` parameters.\\n\\n Args:\\n group_by (str): _description_\\n output (str): _description_\\n agg_type (str): _description_\\n \\\"\\\"\\\"\\n grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\\n return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\\n \\n def scatter_fit_fig(self, xdata_scatter, ydata_scatter, xdata, ydata):\\n myfigs = []\\n popt, pcov, x_fit, y_fit = self.fitting_func(xdata, ydata)\\n fig = go.Scatter(\\n x=xdata_scatter,\\n y=ydata_scatter,\\n line=dict(color=\\\"#6597aa\\\"),\\n mode=\\\"markers\\\",\\n marker={\\\"opacity\\\": 0.2, \\\"symbol\\\": \\\"square\\\"},\\n name=\\\"runs\\\",\\n )\\n myfigs.append(fig)\\n fig = go.Scatter(\\n x=xdata, y=ydata, line=dict(color=\\\"#aa6464\\\"), mode=\\\"markers\\\", name=\\\"average\\\"\\n )\",\n \"type\": \"random\"\n },\n {\n \"content\": \"from __future__ import annotations\\n\\nimport pickle\\nfrom collections.abc import Iterable\\nfrom copy import deepcopy\\nfrom itertools import product\\nfrom os.path import isfile\\nimport plotly.graph_objects as go\\n\\nimport numpy as np\\nimport pandas as 
pd\\nfrom qibo import gates\\nfrom qibo.models import Circuit\\nfrom qibo.noise import NoiseModel\\n\\nfrom qibocal.calibrations.protocols.utils import (\\n ONEQUBIT_CLIFFORD_PARAMS,\\n experiment_directory,\\n)\\n\\n\\nclass Circuitfactory:\\n \\\"\\\"\\\"TODO write documentation\\n TODO make the embedding into lager qubit space possible\\\"\\\"\\\"\\n\\n def __init__(\\n self, nqubits: int, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n self.nqubits = nqubits if nqubits is not None else len(qubits)\\n self.qubits = qubits if qubits is not None else [x for x in range(nqubits)]\\n self.depths = depths\\n self.runs = runs\\n\\n def __len__(self):\\n return self.runs * len(self.depths)\\n\\n def __iter__(self) -> None:\\n self.n = 0\\n return self\\n\\n def __next__(self) -> None:\\n if self.n >= self.runs * len(self.depths):\\n raise StopIteration\\n else:\\n circuit = self.build_circuit(self.depths[self.n % len(self.depths)])\\n self.n += 1\\n # Distribute the circuit onto the given support.\\n bigcircuit = Circuit(self.nqubits)\\n bigcircuit.add(circuit.on_qubits(*self.qubits))\\n return bigcircuit\\n\\n def build_circuit(self, depth: int):\\n raise NotImplementedError\\n\\n\\nclass SingleCliffordsFactory(Circuitfactory):\\n def __init__(\\n self, nqubits: list, depths: list, runs: int, qubits: list = None\\n ) -> None:\\n super().__init__(nqubits, depths, runs, qubits)\\n\\n def build_circuit(self, depth: int):\\n circuit = Circuit(len(self.qubits))\\n for _ in range(depth):\\n circuit.add(self.gates())\\n circuit.add(gates.M(*range(len(self.qubits))))\",\n \"type\": \"random\"\n }\n]"},"gt":{"kind":"list like","value":[" newdata.append(self.single_task(deepcopy(circuit), datarow))"," allsamples = self.samples"," circuit = self.build_circuit(self.depths[self.n % len(self.depths)])"," [np.sum(np.product(samples == state, axis=1)) for state in allstates]"," for samples in allsamples"," samples = circuit(nshots=self.nshots).samples()"," return {\"samples\": samples}"," gates.Unitary("," self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count"," if not isinstance(gate, gates.measurements.M):"," return np.array(self.dataframe[\"samples\"].tolist())"," circuit.add(self.gates())"," circuit.add(gates.M(*range(len(self.qubits))))"," name=\"A: {:.3f}, p: {:.3f}, B: {:.3f}\".format(popt[0], popt[1], popt[2]),"," grouped_df = self.df.groupby(group_by)[output].apply(agg_type)"," df = self.dataframe"," df[name] = datacolumn"," self.data = df.to_dict(\"records\")"," bigcircuit.add(circuit.on_qubits(*self.qubits))"," for plot in plot_list:"," fig.add_trace(plot, row=count//2 + 1, col = count%2+1)"," newcircuit.add("," np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),"," newcircuit.add(gates.M(*idxmap(np.array(gate.init_args[0:]))))"," np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),"," self.df = dataframe"," subplot_titles = [figdict.get('subplot_title') for figdict in self.all_figures]"," return np.array(grouped_df.index), np.array(grouped_df.values.tolist())"," data = pickle.load(f)"," super().__init__(nqubits, depths, runs, qubits)"," circuitfactory = pickle.load(f)",""," an iterable into a list."," \"\"\"Draws the parameters and builds the unitary Clifford gates for"," return pd.DataFrame(self.data)"," return self"," plot_list = fig_dict['figs']"," myfigs.append(fig)"," return circuit"],"string":"[\n \" newdata.append(self.single_task(deepcopy(circuit), datarow))\",\n \" allsamples = self.samples\",\n \" circuit = self.build_circuit(self.depths[self.n % 
len(self.depths)])\",\n \" [np.sum(np.product(samples == state, axis=1)) for state in allstates]\",\n \" for samples in allsamples\",\n \" samples = circuit(nshots=self.nshots).samples()\",\n \" return {\\\"samples\\\": samples}\",\n \" gates.Unitary(\",\n \" self.clifford_unitary(*ONEQUBIT_CLIFFORD_PARAMS[rint]), count\",\n \" if not isinstance(gate, gates.measurements.M):\",\n \" return np.array(self.dataframe[\\\"samples\\\"].tolist())\",\n \" circuit.add(self.gates())\",\n \" circuit.add(gates.M(*range(len(self.qubits))))\",\n \" name=\\\"A: {:.3f}, p: {:.3f}, B: {:.3f}\\\".format(popt[0], popt[1], popt[2]),\",\n \" grouped_df = self.df.groupby(group_by)[output].apply(agg_type)\",\n \" df = self.dataframe\",\n \" df[name] = datacolumn\",\n \" self.data = df.to_dict(\\\"records\\\")\",\n \" bigcircuit.add(circuit.on_qubits(*self.qubits))\",\n \" for plot in plot_list:\",\n \" fig.add_trace(plot, row=count//2 + 1, col = count%2+1)\",\n \" newcircuit.add(\",\n \" np.cos(theta / 2) - 1.0j * nz * np.sin(theta / 2),\",\n \" newcircuit.add(gates.M(*idxmap(np.array(gate.init_args[0:]))))\",\n \" np.cos(theta / 2) + 1.0j * nz * np.sin(theta / 2),\",\n \" self.df = dataframe\",\n \" subplot_titles = [figdict.get('subplot_title') for figdict in self.all_figures]\",\n \" return np.array(grouped_df.index), np.array(grouped_df.values.tolist())\",\n \" data = pickle.load(f)\",\n \" super().__init__(nqubits, depths, runs, qubits)\",\n \" circuitfactory = pickle.load(f)\",\n \"\",\n \" an iterable into a list.\",\n \" \\\"\\\"\\\"Draws the parameters and builds the unitary Clifford gates for\",\n \" return pd.DataFrame(self.data)\",\n \" return self\",\n \" plot_list = fig_dict['figs']\",\n \" myfigs.append(fig)\",\n \" return circuit\"\n]"},"metainfo_separator":{"kind":"string","value":" METASEP\n"}}},{"rowIdx":211,"cells":{"repo_id":{"kind":"number","value":33,"string":"33"},"repo_name":{"kind":"string","value":"codeforphilly__paws-data-pipeline"},"project_context":{"kind":"string","value":"codeforphilly__paws-data-pipeline METASEP\n\nsrc/server/api/API_ingest/updated_data.py METASEP\n\nfrom sqlalchemy.orm import sessionmaker\nfrom simple_salesforce import Salesforce\nfrom config import engine\n\nimport structlog\nlogger = structlog.get_logger()\n\n\ndef get_updated_contact_data():\n Session = sessionmaker(engine)\n\n qry = \"\"\" -- Collect latest foster/volunteer dates\n with ev_dates as \n (select \n person_id, \n max(case when event_type=1 then time else null end) adopt,\n max(case when event_type=2 then time else null end) foster_out,\n -- max(case when event_type=3 then time else null end) rto,\n max(case when event_type=5 then time else null end) foster_return\n \n from \n sl_animal_events sla\n left join sl_event_types sle on sle.id = sla.event_type\n \n where sle.id in (1,2,5)\n group by person_id\n order by person_id\n )\n \n\n select json_agg (upd) as \"cd\" from (\n select\n slsf.source_id as \"contactId\" , -- long salesforce string\n slp.id as \"personId\" , -- short PAWS-local shelterluv id\n \n case\n when \n (extract(epoch from now())::bigint - foster_out < 365*86400) -- foster out in last year \n or (extract(epoch from now())::bigint - foster_return < 365*86400) -- foster return \n then 'Active'\n else 'Inactive'\n end as \"updatedFosterStatus\" ,\n \n (to_timestamp(foster_out ) at time zone 'America/New_York')::date as \"updatedFosterStartDate\",\n (to_timestamp(foster_return ) at time zone 'America/New_York')::date as \"updatedFosterEndDate\",\n\n min(vs.from_date) as 
\"updatedFirstVolunteerDate\",\n max(vs.from_date) as \"updatedLastVolunteerDate\",\n vc.source_id as \"volgisticsId\" \n\n \n from \n ev_dates\n left join pdp_contacts slc on slc.source_id = person_id::text and slc.source_type = 'shelterluvpeople'\n left join pdp_contacts slsf on slsf.matching_id = slc.matching_id and slsf.source_type = 'salesforcecontacts'\n left join shelterluvpeople slp on slp.internal_id = person_id::text\n left join pdp_contacts vc on vc.matching_id = slc.matching_id and vc.source_type = 'volgistics'\n left join volgisticsshifts vs on vs.volg_id::text = vc.source_id \n\n where \n slsf.source_id is not null\n\n group by\n slsf.source_id,\n slp.id,\n vc.source_id,\n foster_out ,\n foster_return\n \n ) upd ;\n\n\n \"\"\"\n\n with Session() as session:\n result = session.execute(qry)\n sfdata = result.fetchone()[0]\n logger.debug(\"Query for Salesforce update returned %d records\", len(sfdata))\n return sfdata\nsrc/server/api/API_ingest/sl_animal_events.py METASEP\nimport json\nimport os\nimport posixpath as path\n\nimport structlog\n\nlogger = structlog.get_logger()\n\nimport requests\n\nfrom api.API_ingest import shelterluv_db\n\n# There are a number of different record types. These are the ones we care about.\nkeep_record_types = [\n \"Outcome.Adoption\",\n \"Outcome.Foster\",\n \"Outcome.ReturnToOwner\",\n \"Intake.AdoptionReturn\",\n \"Intake.FosterReturn\"\n]\n\n# from config import engine\n# from flask import current_app\n# from sqlalchemy.sql import text\n\nBASE_URL = \"http://shelterluv.com/api/\"\nMAX_COUNT = 100 # Max records the API will return for one call\n\n# Get the API key\ntry:\n from secrets_dict import SHELTERLUV_SECRET_TOKEN\nexcept ImportError:\n # Not running locally\n from os import environ\n\n try:\n SHELTERLUV_SECRET_TOKEN = environ[\"SHELTERLUV_SECRET_TOKEN\"]\n except KeyError:\n # Not in environment\n # You're SOL for now\n logger.error(\"Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment\")\n\n\nTEST_MODE=os.getenv(\"TEST_MODE\") # if not present, has value None\n\nheaders = {\"Accept\": \"application/json\", \"X-API-Key\": SHELTERLUV_SECRET_TOKEN}\n\n# Sample response from events request:\n\n# {\n# \"success\": 1,\n# \"events\": [\n# {\n# \"Type\": \"Outcome.Adoption\",\n# \"Subtype\": \"PAC\",\n# \"Time\": \"1656536900\",\n# \"User\": \"phlp_mxxxx\",\n# \"AssociatedRecords\": [\n# {\n# \"Type\": \"Animal\",\n# \"Id\": \"5276xxxx\"\n# },\n# {\n# \"Type\": \"Person\",\n# \"Id\": \"5633xxxx\"\n# }\n# ]\n# },\n# {...}\n# ],\n# \"has_more\": true,\n# \"total_count\": 67467\n# }\n\n\ndef get_event_count():\n \"\"\"Test that server is operational and get total event count.\"\"\"\n events = \"v1/events&offset=0&limit=1\"\n URL = path.join(BASE_URL, events)\n\n try:\n response = requests.request(\"GET\", URL, headers=headers)\n except Exception as e:\n logger.error(\"get_event_count failed with \", e)\n return -2\n\n if response.status_code != 200:\n logger.error(\"get_event_count \", response.status_code, \"code\")\n return -3\n\n try:\n decoded = json.loads(response.text)\n except json.decoder.JSONDecodeError as e:\n logger.error(\"get_event_count JSON decode failed with\", e)\n return -4\n\n if decoded[\"success\"]:\n return decoded[\"total_count\"]\n else:\n logger.error(decoded['error_message'])\n return -5 # AFAICT, this means URL was bad\n\n\ndef get_events_bulk():\n \"\"\"Pull all event records from SL \"\"\"\n\n # Interesting API design - event record 0 is the newest. 
But since we pull all records each time it doesn't\n # really matter which direction we go. Simplest to count up, and we can pull until 'has_more' goes false.\n # Good news, the API is robust and won't blow up if you request past the end.\n # At 100 per request, API returns about 5000 records/minute\n\n event_records = []\n\n raw_url = path.join(BASE_URL, \"v1/events&offset={0}&limit={1}\")\n offset = 0\n limit = MAX_COUNT\n more_records = True\n\n while more_records:\n\n url = raw_url.format(offset, limit)\n\n try:\n response = requests.request(\"GET\", url, headers=headers)\n except Exception as e:\n logger.error(\"get_events failed with \", e)\n return -2\n\n if response.status_code != 200:\n logger.error(\"get_event_count \", response.status_code, \"code\")\n return -3\n\n try:\n decoded = json.loads(response.text)\n except json.decoder.JSONDecodeError as e:\n logger.error(\"get_event_count JSON decode failed with\", e)\n return -4\n\n if decoded[\"success\"]:\n for evrec in decoded[\"events\"]:\n if evrec[\"Type\"] in keep_record_types:\n event_records.append(evrec)\n\n more_records = decoded[\"has_more\"] # if so, we'll make another pass\n offset += limit\n if offset % 1000 == 0:\n logger.debug(\"Reading offset %s\", str(offset))\n if TEST_MODE and offset > 1000:\n more_records=False # Break out early \n\n else:\n return -5 # AFAICT, this means URL was bad\n\n return event_records\n\n\ndef slae_test():\n total_count = get_event_count()\n logger.debug(\"Total events: %d\", total_count)\n\n b = get_events_bulk()\n logger.debug(\"Stored records: %d\", len(b))\n\n # f = filter_events(b)\n # print(f)\n\n count = shelterluv_db.insert_events(b)\n return count\n\ndef store_all_animals_and_events():\n total_count = get_event_count()\n logger.debug(\"Total events: %d\", total_count)\n\n b = get_events_bulk()\n logger.debug(\"Stored records: %d\", len(b))\n\n # f = filter_events(b)\n # print(f)\n\n count = shelterluv_db.insert_events(b)\n return count\n\n\n# Query to get last adopt/foster event:\n\n# \"\"\"\n# select \n# person_id as sl_person_id, max(to_timestamp(time)::date) as last_fosteradopt_event\n# from \n# sl_animal_events\n# where event_type < 4 -- check this\n# group by \n# person_id\n# order by \n# person_id asc;\n# \"\"\"\n# Volgistics last shift \n\n# \"\"\"\n# select \n# volg_id, max(from_date) as last_shift\n# from\n# volgisticsshifts\n# group by \n# volg_id\n# order by \n# volg_id ;\n# \"\"\"\nsrc/server/api/API_ingest/shelterluv_people.py METASEP\nimport requests, os\nfrom models import ShelterluvPeople\nfrom config import engine\nfrom sqlalchemy.orm import sessionmaker\nimport structlog\nlogger = structlog.get_logger()\n\ntry:\n from secrets_dict import SHELTERLUV_SECRET_TOKEN\nexcept ImportError:\n # Not running locally\n logger.debug(\"Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********\")\n from os import environ\n\n try:\n SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']\n except KeyError:\n # Not in environment\n # You're SOL for now\n logger.error(\"Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment\")\n\n\n\nTEST_MODE=os.getenv(\"TEST_MODE\") # if not present, has value None\nLIMIT = 100\n#################################\n# This script is used to fetch data from shelterluv API.\n# Please be mindful of your usage.\n# example: /people will fetch the data of all people. 
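# --- Illustrative sketch (editor's addition, not part of the dumped module) ---
# get_events_bulk() above and store_shelterluv_people_all() below page through
# the Shelterluv API the same way: request ``limit`` records at ``offset`` and
# advance until the response's ``has_more`` flag goes false. A stripped-down
# version of that loop (hypothetical helper name, assuming only ``requests`` and
# a valid API token; the real code adds error handling, filtering and TEST_MODE):
import requests

def fetch_all(endpoint, token, limit=100):
    headers = {"Accept": "application/json", "X-API-Key": token}
    url = "http://shelterluv.com/api/v1/{}?offset={}&limit={}"
    items, offset, has_more = [], 0, True
    while has_more:
        payload = requests.get(url.format(endpoint, offset, limit), headers=headers).json()
        items.extend(payload[endpoint])   # e.g. payload["people"] or payload["events"]
        has_more = payload["has_more"]
        offset += limit
    return items
# --- end of editor's sketch ----------------------------------------------------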
and send approximately 300 requests.\n# https://help.shelterluv.com/hc/en-us/articles/115000580127-Shelterluv-API-Overview\n#################################\n\n######## Insights ###############\n# Max result items is 100 - even though it's not specifically specified in the above reference\n# /people has all the data. it seems that /person/:id isn't used\n#################################\n\n''' Iterate over all shelterlove people and store in json file in the raw data folder\nWe fetch 100 items in each request, since that is the limit based on our research '''\ndef store_shelterluv_people_all():\n offset = 0\n has_more = True\n Session = sessionmaker(engine)\n\n with Session() as session:\n logger.debug(\"Truncating table shelterluvpeople\")\n\n session.execute(\"TRUNCATE TABLE shelterluvpeople\")\n\n logger.debug(\"Start getting shelterluv contacts from people table\")\n\n while has_more:\n r = requests.get(\"http://shelterluv.com/api/v1/people?limit={}&offset={}\".format(LIMIT, offset),\n headers={\"x-api-key\": SHELTERLUV_SECRET_TOKEN})\n response = r.json()\n for person in response[\"people\"]:\n #todo: Does this need more \"null checks\"?\n session.add(ShelterluvPeople(firstname=person[\"Firstname\"],\n lastname=person[\"Lastname\"],\n id=person[\"ID\"] if \"ID\" in person else None,\n internal_id=person[\"Internal-ID\"],\n associated=person[\"Associated\"],\n street=person[\"Street\"],\n apartment=person[\"Apartment\"],\n city=person[\"City\"],\n state=person[\"State\"],\n zip=person[\"Zip\"],\n email=person[\"Email\"],\n phone=person[\"Phone\"],\n animal_ids=person[\"Animal_ids\"]))\n offset += LIMIT\n has_more = response[\"has_more\"] if not TEST_MODE else response[\"has_more\"] and offset < 1000\n if offset % 1000 == 0:\n logger.debug(\"Reading offset %s\", str(offset))\n session.commit()\n\n logger.debug(\"Finished getting shelterluv contacts from people table\")\n return offset\n\n\nsrc/server/api/API_ingest/shelterluv_db.py METASEP\nfrom sqlalchemy import Table, MetaData\nfrom sqlalchemy.orm import sessionmaker\n\nfrom config import engine\n\nimport structlog\nlogger = structlog.get_logger()\n\n\ndef insert_animals(animal_list):\n \"\"\"Insert animal records into shelterluv_animals table and return row count. 
\"\"\"\n\n Session = sessionmaker(engine)\n session = Session()\n metadata = MetaData()\n sla = Table(\"shelterluv_animals\", metadata, autoload=True, autoload_with=engine)\n\n # From Shelterluv: ['ID', 'Internal-ID', 'Name', 'Type', 'DOBUnixTime', 'CoverPhoto', 'LastUpdatedUnixTime']\n # In db: ['local_id', 'id' (PK), 'name', 'type', 'dob', 'photo', 'update_stamp']\n\n ins_list = [] # Create a list of per-row dicts\n for rec in animal_list:\n ins_list.append(\n {\n \"id\": rec[\"Internal-ID\"],\n \"local_id\": rec[\"ID\"] if rec[\"ID\"] else 0, # Sometimes there's no local id\n \"name\": rec[\"Name\"],\n \"type\": rec[\"Type\"],\n \"dob\": rec[\"DOBUnixTime\"],\n \"update_stamp\": rec[\"LastUpdatedUnixTime\"],\n \"photo\": rec[\"CoverPhoto\"],\n }\n )\n\n ret = session.execute(sla.insert(ins_list))\n\n session.commit() # Commit all inserted rows\n session.close()\n\n return ret.rowcount\n\n\ndef truncate_animals():\n \"\"\"Truncate the shelterluv_animals table\"\"\"\n\n Session = sessionmaker(engine)\n session = Session()\n\n truncate = \"TRUNCATE table shelterluv_animals;\"\n session.execute(truncate)\n\n session.commit() # Commit all inserted rows\n session.close()\n\n return 0\n\n\ndef truncate_events():\n \"\"\"Truncate the shelterluv_events table\"\"\"\n\n Session = sessionmaker(engine)\n with Session() as session:\n truncate = \"TRUNCATE table sl_animal_events;\"\n session.execute(truncate)\n session.commit()\n\n return 0\n\ndef insert_events(event_list):\n \"\"\"Insert event records into sl_animal_events table and return row count. \"\"\"\n\n # Always a clean insert\n truncate_events()\n\n Session = sessionmaker(engine)\n with Session() as session:\n metadata = MetaData()\n sla = Table(\"sl_animal_events\", metadata, autoload=True, autoload_with=engine)\n\n # TODO: Pull from DB - inserted in db_setup/base_users.py/populate_sl_event_types()\n event_map = {\n \"Outcome.Adoption\": 1,\n \"Outcome.Foster\": 2,\n \"Outcome.ReturnToOwner\": 3,\n \"Intake.AdoptionReturn\": 4,\n \"Intake.FosterReturn\":5\n }\n\n # \"\"\" INSERT INTO \"sl_event_types\" (\"id\",\"event_name\") VALUES\n # ( 1,'Outcome.Adoption' ),\n # ( 2,'Outcome.Foster' ),\n # ( 3,'Outcome.ReturnToOwner' ),\n # ( 4,'Intake.AdoptionReturn' ),\n # ( 5,'Intake.FosterReturn' ) \"\"\"\n\n\n\n\n # Event record: [ AssociatedRecords[Type = Person][\"Id\"]',\n # AssociatedRecords[Type = Animal][\"Id\"]',\n # \"Type\",\n # \"Time\"\n # ]\n #\n # In db: ['id',\n # 'person_id',\n # 'animal_id',\n # 'event_type',\n # 'time']\n\n ins_list = [] # Create a list of per-row dicts\n for rec in event_list:\n ins_list.append(\n {\n \"person_id\": next(\n filter(lambda x: x[\"Type\"] == \"Person\", rec[\"AssociatedRecords\"])\n )[\"Id\"],\n \"animal_id\": next(\n filter(lambda x: x[\"Type\"] == \"Animal\", rec[\"AssociatedRecords\"])\n )[\"Id\"],\n \"event_type\": event_map[rec[\"Type\"]],\n \"time\": rec[\"Time\"],\n }\n )\n\n # TODO: Wrap with try/catch\n ret = session.execute(sla.insert(ins_list))\n session.commit()\n logger.debug(\"finished inserting events\")\n\n return ret.rowcount\n\n\nsrc/server/api/API_ingest/shelterluv_animals.py METASEP\nimport os, time, json\nimport posixpath as path\n\nimport requests\n\nfrom api.API_ingest import shelterluv_db\nfrom server.api.API_ingest.shelterluv_db import insert_animals\n\n\n# from config import engine\n# from flask import current_app\n# from sqlalchemy.sql import text\n\nBASE_URL = 'http://shelterluv.com/api/'\nMAX_COUNT = 100 # Max records the API will return for one call\n\ntry:\n from 
secrets_dict import SHELTERLUV_SECRET_TOKEN\nexcept ImportError:\n # Not running locally\n from os import environ\n\n try:\n SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']\n except KeyError:\n # Not in environment\n # You're SOL for now\n print(\"Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment\")\n\n\n\nheaders = {\n \"Accept\": \"application/json\",\n \"X-API-Key\": SHELTERLUV_SECRET_TOKEN\n}\n\nlogger = print\n\ndef get_animal_count():\n \"\"\"Test that server is operational and get total animal count.\"\"\"\n animals = 'v1/animals&offset=0&limit=1'\n URL = path.join(BASE_URL,animals)\n\n try:\n response = requests.request(\"GET\",URL, headers=headers)\n except Exception as e:\n logger('get_animal_count failed with ', e)\n return -2\n\n if response.status_code != 200:\n logger(\"get_animal_count \", response.status_code, \"code\")\n return -3\n\n try:\n decoded = json.loads(response.text)\n except json.decoder.JSONDecodeError as e:\n logger(\"get_animal_count JSON decode failed with\", e)\n return -4\n\n if decoded['success']:\n return decoded['total_count']\n else:\n return -5 # AFAICT, this means URL was bad\n\n\ndef get_updated_animal_count(last_update):\n \"\"\"Test that server is operational and get total animal count.\"\"\"\n animals = 'v1/animals&offset=0&limit=1&sort=updated_at&since=' + str(last_update)\n URL = path.join(BASE_URL,animals)\n\n try:\n response = requests.request(\"GET\",URL, headers=headers)\n except Exception as e:\n logger('get_updated_animal_count failed with ', e)\n return -2\n\n if response.status_code != 200:\n logger(\"get_updated_animal_count \", response.status_code, \"code\")\n return -3\n\n try:\n decoded = json.loads(response.text)\n except json.decoder.JSONDecodeError as e:\n logger(\"get_updated_animal_count JSON decode failed with\", e)\n return -4\n\n if decoded['success']:\n return decoded['total_count']\n else:\n return -5 # AFAICT, this means URL was bad\n\n\n\n\ndef filter_animals(raw_list):\n \"\"\"Given a list of animal records as returned by SL, return a list of records with only the fields we care about.\"\"\"\n\n good_keys = ['ID', 'Internal-ID', 'Name', 'Type', 'DOBUnixTime', 'CoverPhoto','LastUpdatedUnixTime']\n\n filtered = []\n\n for r in raw_list:\n f = {}\n for k in good_keys:\n try:\n f[k] = r[k]\n except:\n if k in ('DOBUnixTime','LastUpdatedUnixTime'):\n f[k] = 0\n else:\n f[k] = ''\n filtered.append(f)\n\n return filtered\n\n\n\n\ndef get_animals_bulk(total_count):\n \"\"\"Pull all animal records from SL \"\"\"\n\n # 'Great' API design - animal record 0 is the newest, so we need to start at the end, \n # back up MAX_COUNT rows, make our request, then keep backing up. 
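# --- Illustrative sketch (editor's addition, not part of the dumped module) ---
# The loop in get_animals_bulk() walks the collection backwards: it starts
# MAX_COUNT records before the end, requests a page, then steps the offset down,
# shrinking the final page so it ends exactly at offset 0. The offset/limit
# sequence that loop produces, reproduced as a small standalone helper
# (hypothetical name, standard library only):
def backward_windows(total_count, max_count=100):
    windows = []
    offset = total_count - max_count if total_count - max_count > -1 else 0
    limit = max_count
    while offset > -1:
        windows.append((offset, limit))
        if offset == 0:
            break
        offset -= max_count
        if offset < 0:
            limit = limit + offset
            offset = 0
    return windows

assert backward_windows(250) == [(150, 100), (50, 100), (0, 50)]
# --- end of editor's sketch ----------------------------------------------------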
We need to keep checking\n # the total records to ensure one wasn't added in the middle of the process.\n # Good news, the API is robust and won't blow up if you request past the end.\n\n raw_url = path.join(BASE_URL, 'v1/animals&offset={0}&limit={1}')\n\n start_record = int(total_count) \n offset = (start_record - MAX_COUNT) if (start_record - MAX_COUNT) > -1 else 0\n limit = MAX_COUNT\n\n while offset > -1 :\n\n logger(\"getting at offset\", offset)\n url = raw_url.format(offset,limit)\n\n try:\n response = requests.request(\"GET\",url, headers=headers)\n except Exception as e:\n logger('get_animals failed with ', e)\n return -2\n\n if response.status_code != 200:\n logger(\"get_animal_count \", response.status_code, \"code\")\n return -3\n\n try:\n decoded = json.loads(response.text)\n except json.decoder.JSONDecodeError as e:\n logger(\"get_animal_count JSON decode failed with\", e)\n return -4\n\n if decoded['success']:\n insert_animals( filter_animals(decoded['animals']) )\n if offset == 0:\n break\n offset -= MAX_COUNT \n if offset < 0 :\n limit = limit + offset\n offset = 0\n else:\n return -5 # AFAICT, this means URL was bad\n\n return 'zero'\n\n\ndef update_animals(last_update):\n \"\"\"Get the animals inserted or updated since last check, insert/update db records. \"\"\"\n\n updated_records = get_updated_animal_count(last_update)\n\n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndef sla_test():\n total_count = get_animal_count()\n print('Total animals:',total_count)\n\n b = get_animals_bulk(total_count)\n print(len(b))\n\n # f = filter_animals(b)\n # print(f)\n\n # count = shelterluv_db.insert_animals(f)\n return len(b)\n\n# if __name__ == '__main__' : \n\n# total_count = get_animal_count()\n# print('Total animals:',total_count)\n\n# b = get_animals_bulk(9)\n# print(len(b))\n\n# f = filter_animals(b)\n# print(f)\n\n# count = shelterluv_db.insert_animals(f)\nsrc/server/api/API_ingest/salesforce_contacts.py METASEP\nimport os\n\nimport structlog\nfrom simple_salesforce import Salesforce\nfrom sqlalchemy.orm import sessionmaker\n\nfrom config import engine\nfrom models import SalesForceContacts\n\nlogger = structlog.get_logger()\n\nTEST_MODE = os.getenv(\"TEST_MODE\") # if not present, has value None\n\ndef store_contacts_all():\n Session = sessionmaker(engine)\n with Session() as session:\n\n logger.debug(\"truncating table salesforcecontacts\")\n session.execute(\"TRUNCATE TABLE salesforcecontacts\")\n\n logger.debug(\"retrieving the latest salesforce contacts data\")\n\n if os.path.exists('server/bin/connected-app-secrets.pem'):\n pem_file = 'server/bin/connected-app-secrets.pem'\n elif os.path.exists('bin/connected-app-secrets.pem'):\n pem_file = 'bin/connected-app-secrets.pem'\n else:\n logger.error(\"Missing salesforce jwt private key pem file, skipping data pull\")\n return\n\n sf = Salesforce(username=os.getenv('SALESFORCE_USERNAME'), consumer_key=os.getenv('SALESFORCE_CONSUMER_KEY'),\n privatekey_file=pem_file)\n results = sf.query(\"SELECT Contact_ID_18__c, FirstName, LastName, Contact.Account.Name, MailingCountry, MailingStreet, MailingCity, MailingState, MailingPostalCode, Phone, MobilePhone, Email FROM Contact\")\n logger.debug(\"%d total Salesforce contact records\", results['totalSize'])\n if TEST_MODE:\n logger.debug(\"running in test mode so only downloading first page of Salesforce contacts\")\n\n total_records = 0\n done = False\n while not done:\n total_records += len(results['records'])\n logger.debug(\"Query returned %d Salesforce contact records, total %d\", 
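# --- Illustrative sketch (editor's addition, not part of the dumped module) ---
# The download loop in store_contacts_all() follows Salesforce's cursor-style
# paging: the first query() returns a batch of records plus a nextRecordsUrl,
# and query_more() is called until 'done' is true. The bare pattern, assuming an
# already-authenticated simple_salesforce client ``sf`` (hypothetical helper name):
def fetch_all_records(sf, soql):
    results = sf.query(soql)
    records = list(results["records"])
    while not results["done"]:
        results = sf.query_more(results["nextRecordsUrl"], True)
        records.extend(results["records"])
    return records
# --- end of editor's sketch ----------------------------------------------------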
len(results['records']), total_records)\n for row in results['records']:\n account_name = row['Account']['Name'] if row['Account'] is not None else None\n contact = SalesForceContacts(contact_id=row['Contact_ID_18__c'],\n first_name=row['FirstName'],\n last_name=row['LastName'],\n account_name=account_name,\n mailing_country=row['MailingCountry'],\n mailing_street=row['MailingStreet'],\n mailing_city=row['MailingCity'],\n mailing_state_province=row['MailingState'],\n mailing_zip_postal_code=row['MailingPostalCode'],\n phone=row['Phone'],\n mobile=row['MobilePhone'],\n email=row['Email'])\n session.add(contact)\n # if in test mode only return first page of results\n done = results['done'] if not TEST_MODE else True\n if not done:\n results = sf.query_more(results['nextRecordsUrl'], True)\n logger.debug(\"Committing downloaded contact records\")\n session.commit()\n logger.debug(\"finished downloading latest salesforce contacts data\")\n\nsrc/server/api/API_ingest/ingest_sources_from_api.py METASEP\nfrom api.API_ingest import shelterluv_people, salesforce_contacts, sl_animal_events\nimport structlog\nlogger = structlog.get_logger()\n\ndef start():\n logger.debug(\"Start Fetching raw data from different API sources\")\n\n logger.debug(\" Fetching Salesforce contacts\")\n salesforce_contacts.store_contacts_all()\n logger.debug(\" Finished fetching Salesforce contacts\")\n\n logger.debug(\" Fetching Shelterluv people\")\n slp_count = shelterluv_people.store_shelterluv_people_all()\n logger.debug(\" Finished fetching Shelterluv people - %d records\" , slp_count)\n\n logger.debug(\" Fetching Shelterluv events\")\n sle_count = sl_animal_events.store_all_animals_and_events()\n logger.debug(\" Finished fetching Shelterluv events - %d records\" , sle_count)\n\n logger.debug(\"Finished fetching raw data from different API sources\")\n\n\nsrc/server/api/API_ingest/dropbox_handler.py METASEP\nimport dropbox\nimport structlog\nlogger = structlog.get_logger()\n\ntry:\n from secrets_dict import DROPBOX_APP\nexcept ImportError:\n # Not running locally\n logger.debug(\"Couldn't get DROPBOX_APP from file, trying environment **********\")\n from os import environ\n\n try:\n DROPBOX_APP = environ['DROPBOX_APP']\n except KeyError:\n # Not in environment\n # You're SOL for now\n logger.error(\"Couldn't get DROPBOX_APP from file or environment\")\n\n\nclass TransferData:\n def __init__(self, access_token):\n self.access_token = access_token\n\n def upload_file(self, file_from, file_to):\n dbx = dropbox.Dropbox(self.access_token)\n\n with open(file_from, 'rb') as f:\n dbx.files_upload(f.read(), file_to)\n\n\ndef upload_file_to_dropbox(file_path, upload_path):\n access_token = DROPBOX_APP\n transfer_data = TransferData(access_token)\n\n file_from = file_path\n file_to = upload_path # The full path to upload the file to, including the file name\n\n transfer_data.upload_file(file_from, file_to)\nsrc/server/api/API_ingest/__init__.py METASEP\n\nsrc/server/alembic/versions/fd187937528b_create_pdp_contacts_table.py METASEP\n\"\"\"create pdp_contacts table\n\nRevision ID: fd187937528b\nRevises: 57b547e9b464\nCreate Date: 2021-08-10 20:16:54.169168\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects.postgresql import JSONB\nimport datetime\n\n# revision identifiers, used by Alembic.\nrevision = 'fd187937528b'\ndown_revision = '57b547e9b464'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n \n op.create_table('pdp_contacts',\n sa.Column('_id', sa.Integer, primary_key=True, 
autoincrement=True),\n sa.Column('matching_id', sa.Integer, primary_key=True),\n sa.Column('source_type', sa.String, nullable=False),\n sa.Column('source_id', sa.String, nullable=False),\n sa.Column('is_organization', sa.Boolean),\n sa.Column('first_name', sa.String),\n sa.Column('last_name', sa.String),\n sa.Column('email', sa.String),\n sa.Column('mobile', sa.String),\n sa.Column('street_and_number', sa.String),\n sa.Column('apartment', sa.String),\n sa.Column('city', sa.String),\n sa.Column('state', sa.String),\n sa.Column('zip', sa.String),\n sa.Column('json', JSONB),\n sa.Column('created_date', sa.DateTime, default=datetime.datetime.utcnow),\n sa.Column('archived_date', sa.DateTime, default=None)\n )\n\ndef downgrade():\n \n op.drop_table(\"pdp_contacts\")\n op.drop_table(\"pdp_contact_types\")\n\nsrc/server/alembic/versions/fc7325372396_merge_heads.py METASEP\n\"\"\"Merges heads '8f4, '28b\n\nRevision ID: fc7325372396\nRevises: a3ba63dee8f4, fd187937528b\nCreate Date: 2022-01-17 22:05:05.824901\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'fc7325372396'\ndown_revision = ('a3ba63dee8f4', 'fd187937528b')\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n pass\n\n\ndef downgrade():\n pass\n\nsrc/server/alembic/versions/f3d30db17bed_change_pdp_users_password_to_bytea.py METASEP\n\"\"\"Change pdp_users.password to bytea\n\nRevision ID: f3d30db17bed\nRevises: 41da831646e4\nCreate Date: 2020-12-16 21:26:08.548724\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"f3d30db17bed\"\ndown_revision = \"41da831646e4\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.drop_column(\"pdp_users\", \"password\")\n op.add_column(\"pdp_users\", sa.Column(\"password\", sa.LargeBinary, nullable=False))\n\n\ndef downgrade():\n op.drop_column(\"pdp_users\", \"password\")\n op.add_column(\"pdp_users\", \"password\", sa.String(50), nullable=False),\n\nsrc/server/alembic/versions/e3ef522bd3d9_explicit_create_sfd.py METASEP\n\"\"\"Explicit creation for salesforcedonations\n\nRevision ID: e3ef522bd3d9\nRevises: bfb1262d3195\nCreate Date: 2021-06-18 21:55:56.651101\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'e3ef522bd3d9'\ndown_revision = 'bfb1262d3195'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"salesforcedonations\",\n sa.Column(\"_id\", sa.Integer, primary_key=True),\n sa.Column(\"opp_id\", sa.String(), nullable=False),\n sa.Column(\"recurring_donor\", sa.Boolean, nullable=False),\n sa.Column(\"primary_contact\", sa.String(), nullable=True),\n sa.Column(\"contact_id\", sa.String(), nullable=False),\n sa.Column(\"amount\", sa.DECIMAL, nullable=False),\n sa.Column(\"close_date\", sa.Date, nullable=False),\n sa.Column(\"donation_type\", sa.String(), nullable=True), \n sa.Column(\"primary_campaign_source\", sa.String(),nullable=True)\n )\n\n op.execute(\"\"\"CREATE INDEX sfd_contact_id_idx \n ON public.salesforcedonations USING btree (contact_id);\"\"\"\n )\n\n op.create_unique_constraint( \"uq_donation\", \"salesforcedonations\", [\"opp_id\", \"contact_id\", \"close_date\", \"amount\"] ) \n\n\ndef downgrade():\n op.drop_table(\"salesforcedonations\")\nsrc/server/alembic/versions/d0841384d5d7_explicitly_create_vshifts.py METASEP\n\"\"\"Explicitly create vshifts\n\nRevision ID: d0841384d5d7\nRevises: e3ef522bd3d9\nCreate Date: 2021-07-05 
22:05:52.743905\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'd0841384d5d7'\ndown_revision = 'e3ef522bd3d9'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"volgisticsshifts\",\n sa.Column(\"_id\", sa.Integer, primary_key=True),\n sa.Column(\"volg_id\", sa.Integer, nullable=False),\n sa.Column(\"assignment\", sa.String(), nullable=True),\n sa.Column(\"site\", sa.String(), nullable=True), \n sa.Column(\"from_date\", sa.Date, nullable=False),\n sa.Column(\"hours\", sa.DECIMAL, nullable=False)\n )\n\n op.execute(\"\"\"CREATE INDEX vs_volg_id_idx \n ON public.volgisticsshifts USING btree (volg_id);\"\"\"\n )\n\n op.create_unique_constraint( \"uq_shift\", \"volgisticsshifts\", [\"volg_id\", \"assignment\", \"from_date\", \"hours\"] ) \n\n\ndef downgrade():\n op.drop_table(\"volgisticsshifts\")\n\nsrc/server/alembic/versions/bfb1262d3195_create_execution_status_table.py METASEP\n\"\"\"create execution status table\n\nRevision ID: bfb1262d3195\nRevises: 05e0693f8cbb\nCreate Date: 2021-05-28 16:12:40.561829\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.sql.sqltypes import Integer\nfrom sqlalchemy.sql import func\n\n# revision identifiers, used by Alembic.\nrevision = 'bfb1262d3195'\ndown_revision = '05e0693f8cbb'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"execution_status\",\n sa.Column(\"_id\", sa.Integer, primary_key=True),\n sa.Column(\"job_id\", sa.Integer, nullable=False),\n sa.Column(\"stage\", sa.String(32), nullable=False),\n sa.Column(\"status\", sa.String(32), nullable=False),\n sa.Column(\"details\", sa.String(128), nullable=False),\n sa.Column(\"update_stamp\", sa.DateTime, nullable=False, server_default=func.now())\n )\n\n op.execute(\"\"\"CREATE FUNCTION last_upd_trig() RETURNS trigger\n LANGUAGE plpgsql AS\n $$BEGIN\n NEW.update_stamp := current_timestamp;\n RETURN NEW;\n END;$$;\"\"\")\n\n op.execute(\"\"\"CREATE TRIGGER last_upd_trigger\n BEFORE INSERT OR UPDATE ON execution_status\n FOR EACH ROW\n EXECUTE PROCEDURE last_upd_trig();\"\"\"\n ) # Postgres-specific, obviously \n\n op.create_unique_constraint(\"uq_job_id\", \"execution_status\", [\"job_id\"])\n\ndef downgrade():\n op.drop_table(\"execution_status\")\n op.execute(\"DROP FUNCTION last_upd_trig()\")\nsrc/server/alembic/versions/a3ba63dee8f4_rmv_details_size_limit.py METASEP\n\"\"\"Remove execution_status.details field size limit\n\nRevision ID: a3ba63dee8f4\nRevises: 40be910424f0\nCreate Date: 2021-09-18 18:14:48.044985\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = 'a3ba63dee8f4'\ndown_revision = '40be910424f0'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.alter_column('execution_status',\"details\", type_=sa.String(None) )\n\n\n\ndef downgrade():\n op.alter_column('execution_status',\"details\", type_=sa.String(128) )\n\n\nsrc/server/alembic/versions/9687db7928ee_shelterluv_animals.py METASEP\n\"\"\"Create SL_animals table\n\nRevision ID: 9687db7928ee\nRevises: 45a668fa6325\nCreate Date: 2021-12-24 21:15:33.399197\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '9687db7928ee'\ndown_revision = '45a668fa6325'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"shelterluv_animals\",\n sa.Column(\"id\", sa.BigInteger, primary_key=True),\n 
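# --- Illustrative sketch (editor's addition, not part of the dumped migrations) ---
# The last_upd_trigger created in the execution_status migration above stamps
# every INSERT/UPDATE with current_timestamp, so application code never writes
# update_stamp itself. A minimal check of that behaviour, assuming the project's
# SQLAlchemy ``engine`` from config is importable and the migration has been applied:
from sqlalchemy import text
from config import engine

with engine.begin() as conn:
    conn.execute(
        text("INSERT INTO execution_status (job_id, stage, status, details) "
             "VALUES (:job, 'ingest', 'running', 'started')"),
        {"job": 42},
    )
    conn.execute(
        text("UPDATE execution_status SET status = 'complete' WHERE job_id = :job"),
        {"job": 42},
    )
    stamp = conn.execute(
        text("SELECT update_stamp FROM execution_status WHERE job_id = :job"),
        {"job": 42},
    ).scalar()
    # stamp reflects the time of the UPDATE, courtesy of last_upd_trigger.
# --- end of editor's sketch ----------------------------------------------------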
sa.Column(\"local_id\", sa.BigInteger, nullable=False),\n sa.Column(\"name\", sa.Text, nullable=False),\n sa.Column(\"type\", sa.Text, nullable=False),\n sa.Column(\"dob\", sa.BigInteger, nullable=False),\n sa.Column(\"update_stamp\", sa.BigInteger, nullable=False),\n sa.Column(\"photo\", sa.Text, nullable=False)\n )\n\n\ndef downgrade():\n op.drop_table(\"shelterluv_animals\")\n\nsrc/server/alembic/versions/90f471ac445c_create_sl_events.py METASEP\n\"\"\"Shelterluv animal events table\n\nRevision ID: 90f471ac445c\nRevises: 9687db7928ee\nCreate Date: 2022-09-04 17:21:51.511030\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '90f471ac445c'\ndown_revision = '9687db7928ee'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"sl_event_types\",\n sa.Column(\"id\", sa.Integer, autoincrement=True, primary_key=True),\n sa.Column(\"event_name\", sa.Text, nullable=False),\n )\n \n op.create_table (\n \"sl_animal_events\",\n sa.Column(\"id\", sa.Integer, autoincrement=True, primary_key=True),\n sa.Column(\"person_id\", sa.Integer, nullable=False),\n sa.Column(\"animal_id\", sa.Integer, nullable=False),\n sa.Column(\"event_type\", sa.Integer, sa.ForeignKey('sl_event_types.id')),\n sa.Column(\"time\", sa.BigInteger, nullable=False)\n )\n \n op.create_index('sla_idx', 'sl_animal_events', ['person_id'])\n\n\n\ndef downgrade():\n op.drop_table(\"sl_animal_events\")\n op.drop_table(\"sl_event_types\")\nsrc/server/alembic/versions/783cabf889d9_inital_schema_setup.py METASEP\n\"\"\"inital schema setup\n\nRevision ID: 783cabf889d9\nRevises: \nCreate Date: 2020-12-16 01:47:43.686881\n\n\"\"\"\nfrom sqlalchemy.sql.expression import null\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n\n# revision identifiers, used by Alembic.\nrevision = '783cabf889d9'\ndown_revision = None\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table(\n 'pdp_user_roles',\n sa.Column('_id', sa.Integer, primary_key=True),\n sa.Column('role', sa.String(50), nullable=False)\n )\n\n op.create_table(\n 'pdp_users',\n sa.Column('_id', sa.Integer, primary_key=True),\n sa.Column('username', sa.String(50), nullable=False),\n sa.Column('role', sa.String(50), nullable=False),\n sa.Column('password', sa.String(50), nullable=False),\n sa.Column('active', sa.String(50), nullable=False),\n sa.Column('created', sa.DateTime,nullable=False, server_default='now()')\n )\n\ndef downgrade():\n pass\nsrc/server/alembic/versions/72d50d531bd5_fix_pdp_users_timestamp.py METASEP\n\"\"\"Fix pdp_users timestamp\n\nRevision ID: 72d50d531bd5\nRevises: 783cabf889d9\nCreate Date: 2020-12-16 15:22:54.734670\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.sql import func\n\n# revision identifiers, used by Alembic.\nrevision = \"72d50d531bd5\"\ndown_revision = \"783cabf889d9\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.drop_column(\"pdp_users\", \"created\")\n op.add_column(\n \"pdp_users\",\n sa.Column(\"created\", sa.DateTime, nullable=False, server_default=func.now()),\n )\n\n\ndef downgrade():\n sa.Column(\"created\", sa.DateTime, nullable=False, server_default=\"now()\")\n\nsrc/server/alembic/versions/7138d52f92d6_add_uniqueness_constraints.py METASEP\n\"\"\"add uniqueness constraints\n\nRevision ID: 7138d52f92d6\nRevises: f3d30db17bed\nCreate Date: 2020-12-17 17:31:29.154789\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by 
Alembic.\nrevision = \"7138d52f92d6\"\ndown_revision = \"f3d30db17bed\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_unique_constraint(\"uq_username\", \"pdp_users\", [\"username\"])\n op.create_unique_constraint(\"uq_role\", \"pdp_user_roles\", [\"role\"])\n\n\ndef downgrade():\n pass\n\nsrc/server/alembic/versions/6b8cf99be000_add_user_journal_table.py METASEP\n\"\"\"Add user journal table\n\nRevision ID: 6b8cf99be000\nRevises: 36c4ecbfd11a\nCreate Date: 2020-12-21 15:08:07.784568\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.sql import func\n\n\n# revision identifiers, used by Alembic.\nrevision = \"6b8cf99be000\"\ndown_revision = \"36c4ecbfd11a\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table(\n \"pdp_user_journal\",\n sa.Column(\"_id\", sa.Integer, primary_key=True),\n sa.Column(\"stamp\", sa.DateTime, nullable=False, server_default=func.now()),\n sa.Column(\"username\", sa.String(50), nullable=False),\n sa.Column(\"event_type\", sa.String(50)),\n sa.Column(\"detail\", sa.String(120)),\n )\n\n\ndef downgrade():\n op.drop_table('pdp_user_journal')\n\nsrc/server/alembic/versions/57b547e9b464_create_rfm_edges_table.py METASEP\n\"\"\"Create RFM edges table\n\nRevision ID: 57b547e9b464\nRevises: 494e064d69a3\nCreate Date: 2021-07-20 21:39:00.438116\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '57b547e9b464'\ndown_revision = '494e064d69a3'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"rfm_edges\",\n sa.Column(\"component\", sa.String(), primary_key=True),\n sa.Column(\"edge_string\", sa.String(), nullable=False)\n )\n\n\ndef downgrade():\n op.drop_table(\"rfm_edges\")\n\nsrc/server/alembic/versions/494e064d69a3_tables_for_rfm_data.py METASEP\n\"\"\"Tables for RFM data\n\nRevision ID: 494e064d69a3\nRevises: d0841384d5d7\nCreate Date: 2021-07-20 19:45:29.418756\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '494e064d69a3'\ndown_revision = 'd0841384d5d7'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table (\n \"rfm_scores\",\n sa.Column(\"matching_id\", sa.Integer, primary_key=True),\n sa.Column(\"rfm_score\", sa.String(3), nullable=False)\n )\n\n op.create_table (\n \"rfm_mapping\",\n sa.Column(\"rfm_value\", sa.String(3), primary_key=True),\n sa.Column(\"rfm_label\", sa.String(), nullable=True),\n sa.Column(\"rfm_color\", sa.String(), nullable=True, default='0xe0e0e0')\n )\n\n\ndef downgrade():\n op.drop_table(\"rfm_scores\")\n op.drop_table(\"rfm_mapping\")\n\nsrc/server/alembic/versions/45a668fa6325_postgres_matching.py METASEP\n\"\"\"postgres matching\n\nRevision ID: 45a668fa6325\nRevises: fc7325372396\nCreate Date: 2022-02-10 16:19:13.283250\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\nfrom sqlalchemy.dialects import postgresql\n\n# revision identifiers, used by Alembic.\nrevision = '45a668fa6325'\ndown_revision = 'fc7325372396'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n # ### commands auto generated by Alembic - please adjust! 
###\n op.create_table('manual_matches',\n sa.Column('source_type_1', sa.String(), nullable=False),\n sa.Column('source_id_1', sa.String(), nullable=False),\n sa.Column('source_type_2', sa.String(), nullable=False),\n sa.Column('source_id_2', sa.String(), nullable=False),\n sa.PrimaryKeyConstraint('source_type_1', 'source_id_1', 'source_type_2', 'source_id_2')\n )\n op.create_table('salesforcecontacts',\n sa.Column('_id', sa.Integer(), nullable=False),\n sa.Column('contact_id', sa.String(), nullable=True),\n sa.Column('first_name', sa.String(), nullable=True),\n sa.Column('last_name', sa.String(), nullable=True),\n sa.Column('account_name', sa.String(), nullable=True),\n sa.Column('mailing_country', sa.String(), nullable=True),\n sa.Column('mailing_street', sa.String(), nullable=True),\n sa.Column('mailing_city', sa.String(), nullable=True),\n sa.Column('mailing_state_province', sa.String(), nullable=True),\n sa.Column('mailing_zip_postal_code', sa.String(), nullable=True),\n sa.Column('phone', sa.String(), nullable=True),\n sa.Column('mobile', sa.String(), nullable=True),\n sa.Column('email', sa.String(), nullable=True),\n sa.Column('json', postgresql.JSONB(astext_type=sa.Text()), nullable=True),\n sa.Column('created_date', sa.DateTime(), nullable=True),\n sa.PrimaryKeyConstraint('_id')\n )\n op.create_table('shelterluvpeople',\n sa.Column('_id', sa.Integer(), nullable=False),\n sa.Column('firstname', sa.String(), nullable=True),\n sa.Column('lastname', sa.String(), nullable=True),\n sa.Column('id', sa.String(), nullable=True),\n sa.Column('internal_id', sa.String(), nullable=True),\n sa.Column('associated', sa.String(), nullable=True),\n sa.Column('street', sa.String(), nullable=True),\n sa.Column('apartment', sa.String(), nullable=True),\n sa.Column('city', sa.String(), nullable=True),\n sa.Column('state', sa.String(), nullable=True),\n sa.Column('zip', sa.String(), nullable=True),\n sa.Column('email', sa.String(), nullable=True),\n sa.Column('phone', sa.String(), nullable=True),\n sa.Column('animal_ids', postgresql.JSONB(astext_type=sa.Text()), nullable=True),\n sa.Column('json', postgresql.JSONB(astext_type=sa.Text()), nullable=True),\n sa.Column('created_date', sa.DateTime(), nullable=True),\n sa.PrimaryKeyConstraint('_id')\n )\n op.create_table('volgistics',\n sa.Column('_id', sa.Integer(), nullable=False),\n sa.Column('number', sa.String(), nullable=True),\n sa.Column('last_name', sa.String(), nullable=True),\n sa.Column('first_name', sa.String(), nullable=True),\n sa.Column('middle_name', sa.String(), nullable=True),\n sa.Column('complete_address', sa.String(), nullable=True),\n sa.Column('street_1', sa.String(), nullable=True),\n sa.Column('street_2', sa.String(), nullable=True),\n sa.Column('street_3', sa.String(), nullable=True),\n sa.Column('city', sa.String(), nullable=True),\n sa.Column('state', sa.String(), nullable=True),\n sa.Column('zip', sa.String(), nullable=True),\n sa.Column('all_phone_numbers', sa.String(), nullable=True),\n sa.Column('home', sa.String(), nullable=True),\n sa.Column('work', sa.String(), nullable=True),\n sa.Column('cell', sa.String(), nullable=True),\n sa.Column('email', sa.String(), nullable=True),\n sa.Column('json', postgresql.JSONB(astext_type=sa.Text()), nullable=True),\n sa.Column('created_date', sa.DateTime(), nullable=True),\n sa.PrimaryKeyConstraint('_id')\n )\n op.create_index('idx_pdp_contacts_source_type_and_id', 'pdp_contacts', ['source_type', 'source_id'], unique=False)\n op.create_index(op.f('ix_pdp_contacts_mobile'), 'pdp_contacts', 
['mobile'], unique=False)\n op.create_index(op.f('idx_pdp_contacts_lower_first_name'), 'pdp_contacts', [sa.text('lower(first_name)')], unique=False)\n op.create_index(op.f('idx_pdp_contacts_lower_last_name'), 'pdp_contacts', [sa.text('lower(last_name)')], unique=False)\n op.create_index(op.f('idx_pdp_contacts_lower_email'), 'pdp_contacts', [sa.text('lower(email)')], unique=False)\n # ### end Alembic commands ###\n\n\ndef downgrade():\n # ### commands auto generated by Alembic - please adjust! ###\n # Index names below match the op.f('idx_...') names created in upgrade()\n op.drop_index(op.f('idx_pdp_contacts_lower_email'), table_name='pdp_contacts')\n op.drop_index(op.f('idx_pdp_contacts_lower_last_name'), table_name='pdp_contacts')\n op.drop_index(op.f('idx_pdp_contacts_lower_first_name'), table_name='pdp_contacts')\n op.drop_index(op.f('ix_pdp_contacts_mobile'), table_name='pdp_contacts')\n op.drop_index('idx_pdp_contacts_source_type_and_id', table_name='pdp_contacts')\n op.drop_table('volgistics')\n op.drop_table('shelterluvpeople')\n op.drop_table('salesforcecontacts')\n op.drop_table('manual_matches')\n # ### end Alembic commands ###\n\nsrc/server/alembic/versions/41da831646e4_pdp_users_role_fk_from_roles.py METASEP\n\"\"\"pdp_users.role FK from roles\n\nRevision ID: 41da831646e4\nRevises: 72d50d531bd5\nCreate Date: 2020-12-16 15:53:28.514053\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"41da831646e4\"\ndown_revision = \"72d50d531bd5\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.drop_column(\"pdp_users\", \"role\")\n op.add_column(\n \"pdp_users\", sa.Column(\"role\", sa.Integer, sa.ForeignKey(\"pdp_user_roles._id\"))\n )\n\n\ndef downgrade():\n pass\n\nsrc/server/alembic/versions/40be910424f0_update_rfm_mapping_remove_rfm_edges.py METASEP\n\"\"\"Update rfm_mapping, remove rfm_edges\n\nRevision ID: 40be910424f0\nRevises: 57b547e9b464\nCreate Date: 2021-08-08 17:26:40.622536\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '40be910424f0'\ndown_revision = '57b547e9b464'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.drop_table(\"rfm_edges\") # Unneeded, unused\n op.add_column('rfm_mapping',\n sa.Column('rfm_text_color', sa.String())\n )\n\n\ndef downgrade():\n # Also reverse the rfm_text_color column added in upgrade()\n op.drop_column('rfm_mapping', 'rfm_text_color')\n op.create_table (\n \"rfm_edges\",\n sa.Column(\"component\", sa.String(), primary_key=True),\n sa.Column(\"edge_string\", sa.String(), nullable=False)\n )\n\nsrc/server/alembic/versions/36c4ecbfd11a_add_pdp_users_full_name.py METASEP\n\"\"\"Add pdp_users full_name\n\nRevision ID: 36c4ecbfd11a\nRevises: 7138d52f92d6\nCreate Date: 2020-12-18 15:28:17.367718\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = \"36c4ecbfd11a\"\ndown_revision = \"7138d52f92d6\"\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.add_column(\"pdp_users\", sa.Column(\"full_name\", sa.String))\n\n\ndef downgrade():\n op.drop_column(\"pdp_users\", \"full_name\")\n\nsrc/server/alembic/versions/05e0693f8cbb_key_value_table.py METASEP\n\"\"\"key/value table\n\nRevision ID: 05e0693f8cbb\nRevises: 6b8cf99be000\nCreate Date: 2021-03-18 11:35:43.512082\n\n\"\"\"\nfrom alembic import op\nimport sqlalchemy as sa\n\n\n# revision identifiers, used by Alembic.\nrevision = '05e0693f8cbb'\ndown_revision = '6b8cf99be000'\nbranch_labels = None\ndepends_on = None\n\n\ndef upgrade():\n op.create_table(\n 'kv_unique',\n sa.Column('_id', sa.Integer, primary_key=True),\n sa.Column('keycol', 
sa.String(50), nullable=False, unique=True),\n sa.Column('valcol', sa.String(65536), nullable=True),\n )\n \n # op.create_index('kvk_ix', 'kv_unique', ['key'], unique=True)\n\n\ndef downgrade():\n op.drop_table('kv_unique')\n\nsrc/server/rfm_funcs/test_rfm.py METASEP\n# This function is meant to test the RFM create_scores.py function. \n\n'''\nThings needed\n1. Create mock data\n a. Mock data must be realistic\n b. mock data must have 5^3 possibilities for RFM score, i.e., 1 RFM score each. \n c. Therefore we need 125 unique rows.\n d. Recency needs to have at least 5 different dates\n e. Frequency needs to have at least 5 different IDs\n f. Monetary needs to have at least 5 different amounts\n g. Each subject ID will get an RFM score. \n2. create_scores.py will accept this mock data and then generate a new RFM score\n3. final step of this function will perform a jaccard similarity analysis to determine if the vectors\nmatch where the result should be exatly 1.0\n\n'''\n\n\nsrc/server/rfm_funcs/rfm_functions.py METASEP\n# rfm_funcs\n\n### A number of RFM functions which are called by the main create_scores function.\n\n# def date_difference(my_date, query_date):\n# '''\n# This function takes in a single date from the donations dataframe (per row) and compares the difference between that date and the date in which matching occurs.\n# I.e. pipeline matching should provide a query_date so that this can work.\n# '''\n# from datetime import datetime, date\n\n# d1 = datetime.strptime(str(my_date), \"%Y-%m-%d\")\n# d2 = datetime.strptime(str(query_date), \"%Y-%m-%d\")\n# diff = (d2 - d1)\n# return diff\n\n\n\n\n\ndef rfm_concat(days_score, frequency_score, amount_score):\n '''\n This function takes in three pandas.series columns and returns a concatenated version of each score for a total rfm score.\n Assumes that arg1 are Recency, arg2 are Frequency and arg3 are Monetary values\n arg1: pandas.series\n arg2: pandas.series\n arg3: pandas.series\n\n\n '''\n def concat(a, b, c):\n return int(f\"{a}{b}{c}\")\n\n rfm_score = list()\n for ii, jj, kk in zip(days_score, frequency_score, amount_score):\n rfm_score.append(concat(ii,jj,kk))\n\n\n\n return rfm_score\n\n\n\ndef merge_series(list1, list2):\n '''\n This function takes in two tuples and merges them into a list of tuples.\n '''\n merged_list = tuple(zip(list(list1), list(list2)))\n return merged_list\n\n\n\ndef create_bins_dict(recency_edges, frequency_edges, monetary_edges):\n '''\n Create_bins_dict-- creates dictionaries for each edge and label pairing\n This function takes in user defined bin edges and respective labels per each bin edge. User should\n input a list of edges and labels in corresponding order. 
A set of edges and bins for each score should be entered.\n\n e.g.\n recency_edges = np.array([0, 1., 2.,4., 10.])\n '''\n\n recency_dict = {}\n recency_labels = [5, 4, 3, 2, 1]\n for ii,jj in zip(recency_labels, recency_edges):\n recency_dict[\"{0}\".format(ii)] = jj\n\n frequency_dict = {}\n frequency_labels = [1, 2, 3, 4, 5]\n for tt,kk in zip(frequency_labels, frequency_edges):\n frequency_dict[\"{0}\".format(tt)] = kk\n\n\n monetary_dict = {}\n monetary_labels = [1, 2, 3, 4, 5]\n for ww,hh in zip(monetary_labels, monetary_edges):\n monetary_dict[\"{0}\".format(ww)] = hh\n\n\n return recency_dict, frequency_dict, monetary_dict\n\nsrc/server/rfm_funcs/create_scores.py METASEP\nfrom config import engine\nfrom flask import current_app\nimport traceback\n\nimport pandas as pd\nimport numpy as np\nfrom datetime import datetime, date\nfrom collections import Counter\nimport dateutil\n\n\nimport structlog\nlogger = structlog.get_logger()\n\ndef date_difference(my_date, max_date):\n '''\n This function takes in a single date from the donations dataframe (per row) and compares the difference between that date and the date in which matching occurs.\n I.e. pipeline matching should provide a query_date so that this can work.\n '''\n\n d1 = datetime.strptime(str(my_date), \"%Y-%m-%d\")\n d2 = datetime.strptime(str(max_date), \"%Y-%m-%d\")\n diff = (d2 - d1)\n return diff\n\n\ndef create_scores():\n '''\n (used to) require query date as input-- must be string in the following format \"%Y-%m-%d\"\n returns a list of matching_ids and scores as tuples\n will also insert rfm scores into rfm_scores table----see src/server/api/admin_api.py\n '''\n \n # We calculate query_date below in frequency\n\n with engine.connect() as connection:\n logger.debug(\"running create_scores()\")\n # read in donations data from the database (as a dataframe)\n df = pd.read_sql(\n \"\"\"\n select pc.matching_id, s.amount, s.close_date \n from salesforcedonations s \n inner join pdp_contacts pc on pc.source_id = s.contact_id and pc.source_type = 'salesforcecontacts'\n where pc.archived_date is null order by matching_id\n \"\"\"\n , connection)\n df = pd.DataFrame(df, columns=['matching_id', 'amount', 'close_date'])\n\n from api.admin_api import read_rfm_edges, insert_rfm_scores # Avoid circular import issues\n\n rfm_dict = read_rfm_edges()\n\n if len(rfm_dict) == 3: # r,f,m\n\n try:\n\n recency_labels = [5,4,3,2,1]\n recency_bins = list(rfm_dict['r'].values()) #imported from table\n\n frequency_labels = [1,2,3,4,5]\n frequency_bins = list(rfm_dict['f'].values()) #imported from table\n\n monetary_labels = [1,2,3,4,5]\n monetary_bins = list(rfm_dict['m'].values()) #imported from table\n\n\n ########################## recency #########################################\n\n donations_past_year = df\n donations_past_year['close_date'] =pd.to_datetime(donations_past_year['close_date']).dt.date\n\n # calculate date difference between input date and individual row close date\n\n days = []\n max_close_date = donations_past_year['close_date'].max()\n for ii in donations_past_year['close_date']:\n days.append(date_difference(ii, max_close_date))\n donations_past_year['days_since'] = days\n\n grouped_past_year = donations_past_year.groupby('matching_id').agg({'days_since': ['min']}).reset_index()\n logger.debug(grouped_past_year.head())\n \n grouped_past_year[('days_since', 'min')]= grouped_past_year[('days_since', 'min')].dt.days\n\n max_maybe = grouped_past_year[('days_since', 'min')].max()\n\n real_max = 
max(max_maybe, max(recency_bins)+1 )\n\n recency_bins.append(real_max)\n\n\n grouped_past_year['recency_score'] = pd.cut(grouped_past_year[('days_since','min')], bins= recency_bins, labels=recency_labels, include_lowest = True)\n grouped_past_year.rename(columns={('recency_score', ''): 'recency_score'})\n\n ################################## frequency ###############################\n\n query_date = df['close_date'].max()\n\n df['close_date'] = pd.DatetimeIndex(df['close_date'])\n\n df_grouped = df.groupby(['matching_id', pd.Grouper(key = 'close_date', freq = 'Q')]).count().max(level=0)\n\n df_freq = df.loc[df['close_date'] > pd.Timestamp(query_date) - pd.Timedelta( \"365 days\") ] #pd.DatetimeIndex(df['close_date'] - pd.Timedelta( \"30 days\") )\n\n df_grouped = df_freq.groupby(['matching_id']).count()\n\n df_grouped = df_grouped.reset_index()\n\n frequency_bins.append(np.inf)\n\n df_frequency = df_grouped[['matching_id' , 'amount']] # amount is a placeholder as the groupby step just gives a frequency count, the value doesn't correspond to donation monetary amount.\n\n df_frequency = df_frequency.rename(columns = {'amount':'frequency'}) #renaming amount to frequency\n\n df_frequency['frequency_score'] = pd.cut(df_frequency['frequency'],\n bins = frequency_bins, labels=frequency_labels, include_lowest=False)\n \n ################################## amount ##################################\n\n # Need to score people with R, M but not F as a 1\n\n monetary_bins.append(np.inf)\n\n df_amount = df.groupby(df['matching_id'], as_index=False).amount.max()\n\n df_amount['amount_score'] = pd.cut(df_amount['amount'], bins= monetary_bins, include_lowest=True, labels = monetary_labels)\n\n # raise ValueError # Just to test exception handling\n\n # Concatenate rfm scores\n # merge monetary df and frequency df\n df_semi = df_amount.merge(df_frequency, left_on='matching_id', right_on= 'matching_id', how='left')\n logger.debug(grouped_past_year.head())\n logger.debug(df_semi.head())\n \n df_semi['frequency_score'] = df_semi['frequency_score'].fillna(1)\n\n df_final = df_semi.merge(grouped_past_year, left_on='matching_id', right_on= 'matching_id', how='left') # merge monetary/frequency dfs to recency df\n\n # import function: rfm_concat, which will catenate integers as a string and then convert back to a single integer\n from rfm_funcs.rfm_functions import rfm_concat\n rfm_score = rfm_concat(df_final[('recency_score'), ''], df_final['frequency_score'], df_final['amount_score'])\n\n # Append rfm score to final df\n df_final['rfm_score'] = rfm_score\n\n from rfm_funcs.rfm_functions import merge_series\n score_tuples = merge_series((df_final['matching_id']), df_final['rfm_score'])\n\n except Exception as e:\n logger.error(e)\n trace_back_string = traceback.format_exc()\n logger.error(trace_back_string)\n return 0\n\n try:\n insert_rfm_scores(score_tuples)\n except Exception as e:\n logger.error(e)\n trace_back_string = traceback.format_exc()\n logger.error(trace_back_string)\n return 0\n\n return len(score_tuples) # Not sure there's anything to do with them at this point\n\n else: # Didn't get len == 3\n logger.error(\"rfm_edges missing from DB or malformed. 
Could not perform rfm scoring\")\n return 0\n\nsrc/server/rfm_funcs/create_bins.py METASEP\ndef create_bins(data, query_date):\n '''This script will take table data and bin edges for RFM scores for all PAWS donations\n\n query_date = date data was queried\n '''\n\n import pandas as pd\n import numpy as np\n import jenkspy\n from datetime import datetime, date\n import os\n\n\n\n ####\n # read in data from database as list of tuples\n df = pull_donations_for_rfm()\n df = pd.DataFrame(df, columns=['matching_id', 'amount', 'close_date'])\n\n donations_df['Close_Date'] =pd.to_datetime(df['Close_Date']).dt.date\n\n ##################################################################################\n # Calculate recency bins\n from recency_bins import recency_bins\n recency_bins, quantile_scores= recency_bins(donations_df, query_date)\n\n ###################################################################################\n # Calculate frequency bins\n from frequency_bins import frequency_bins\n\n jenks_frequency_bins, human_frequency_bins = frequency_bins(donations_df)\n\n\n def checkIfDuplicates(listOfElems):\n ''' Check if given list contains any duplicates '''\n for elem in listOfElems:\n if listOfElems.count(elem) > 1:\n return True\n return False\n\n duplicats_bool = checkIfDuplicates(jenks_frequency_bins)\n if duplicates_bool == True:\n final_frequency_bins = human_frequency_bins\n\n ###################################################################################\n # Calculate Amount bins\n from amount_bins import amount_bins\n\n amount_jenks_bins, human_amount_bins = amount_bins(donations_df)\n\n\n\n ###################################################################################\n # Write bins to dict\n bins_dict = {}\n\nsrc/server/rfm_funcs/__init__.py METASEP\n\nsrc/server/pipeline/log_db.py METASEP\nfrom datetime import datetime\nimport json\nfrom sqlalchemy.sql import text\nfrom flask import current_app\n\nfrom sqlalchemy.dialects.postgresql import insert\nfrom sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, exc, select\n\nfrom config import engine\nimport structlog\nlogger = structlog.get_logger()\n\nmetadata = MetaData()\n\nex_stat = Table(\"execution_status\", metadata, autoload=True, autoload_with=engine)\n\n# Alembic version bfb1262d3195\n\n# CREATE TABLE public.execution_status (\n# \t\"_id\" serial NOT NULL,\n# \tjob_id int4 NOT NULL,\n# \tstage varchar(32) NOT NULL,\n# \tstatus varchar(32) NOT NULL,\n# \tdetails varchar(128) NOT NULL,\n# \tupdate_stamp timestamp NOT NULL DEFAULT now(),\n# \tCONSTRAINT execution_status_pkey null\n# );\n\n\n\ndef log_exec_status(job_id: str, exec_stage: str, exec_status: str, job_details: str):\n \"\"\"Log execution status (job_id, status, job_details) to DB \"\"\"\n\n with engine.connect() as connection:\n ins_stmt = insert(ex_stat).values( # Postgres-specific insert() supporting ON CONFLICT \n job_id = job_id,\n stage = exec_stage, \n status = exec_status,\n details = json.dumps(job_details)\n )\n\n # If key already present in DB, do update instead \n upsert = ins_stmt.on_conflict_do_update(\n constraint='uq_job_id',\n set_=dict( stage = exec_stage, status = exec_status, details = json.dumps(job_details))\n )\n\n try:\n connection.execute(upsert)\n except Exception as e:\n logger.error(\"Insert/Update failed, Execution status\")\n logger.error(e)\n\n\n\nsrc/server/pipeline/flow_script.py METASEP\nimport time\nimport traceback\n\nfrom api import admin_api\nfrom config import engine\nfrom flask import 
current_app\nfrom models import (\n ManualMatches,\n PdpContacts,\n SalesForceContacts,\n ShelterluvPeople,\n Volgistics,\n)\nfrom networkx import Graph, connected_components\nfrom sqlalchemy import (\n Column,\n Integer,\n MetaData,\n Table,\n and_,\n delete,\n func,\n insert,\n or_,\n select,\n text,\n update,\n)\n\nfrom pipeline import log_db\nimport structlog\nlogger = structlog.get_logger()\n\ndef start_flow():\n start = time.time()\n job_id = admin_api.start_job()\n job_outcome = None\n trace_back_string = None\n\n if not job_id:\n logger.info(\"Failed to get job_id\")\n return \"busy\"\n\n try:\n log_db.log_exec_status(job_id, \"start_flow\", \"executing\", \"\")\n\n with engine.begin() as conn:\n # Here's how we match:\n # 1. Clear pdp_contacts (the old matches).\n # 2. Go through each raw data source table (e.g. salesforcecontacts,\n # volgistics) and copy the latest data for each contact into\n # pdp_contacts.\n # 3. Execute a join of pdp_contacts to itself using names, emails,\n # phone numbers, etc. to get a list of pairs of pdp_contacts ids\n # that \"match.\"\n # 4. Join manual_matches to pdp_contacts to get the pdp_contacts ids\n # of our manual matches.\n #\n # Steps 3 and 4 both produce lists of pairs of ids. Next we need to\n # associate an id with each group of matches. Note that if A matches\n # B and B matches C, then A and C should get the same match id. We\n # can thus think of \"matches\" as edges in a graph of id vertices,\n # and match groups as connected components in that graph. So:\n #\n # 5. Load all the matches into a Graph() and compute its connected\n # components.\n # 6. Update each row in pdp_contacts to give it a match id\n # corresponding to its connected componenet.\n\n logger.debug(\"Clearing pdp_contacts to prepare for match\")\n reset_pdp_contacts_with_unmatched(conn)\n\n logger.debug(\"Computing automatic matches\")\n automatic_matches = get_automatic_matches(conn)\n logger.debug(\"Computing manual matches\")\n manual_matches = get_manual_matches(conn)\n\n match_graph = Graph()\n logger.debug(\"Adding automatic matches to graph\")\n match_graph.add_edges_from(automatic_matches)\n logger.debug(\"Adding manual matches to graph\")\n match_graph.add_edges_from(manual_matches)\n logger.debug(\"Processing graph\")\n match_groups = connected_components(match_graph)\n\n logger.debug(\"Updating pdp_contacts with match ids\")\n update_matching_ids(match_groups, conn)\n\n logger.debug(\"Finished flow script run\")\n job_outcome = \"completed\"\n log_db.log_exec_status(job_id, \"flow\", \"complete\", \"\")\n\n except Exception as e:\n logger.error(e)\n trace_back_string = traceback.format_exc()\n logger.error(trace_back_string)\n\n finally:\n if job_outcome != \"completed\":\n\n log_db.log_exec_status(job_id, \"flow\", \"error\", trace_back_string)\n logger.error(\n \"Uncaught error status, setting job status to 'error' \"\n )\n job_outcome = \"error\"\n return \"error\"\n\n logger.info(\n \"Pipeline execution took %s seconds \", format(time.time() - start)\n )\n return job_outcome\n\n\ndef reset_pdp_contacts_with_unmatched(conn):\n conn.execute(delete(PdpContacts))\n conn.execute(SalesForceContacts.insert_into_pdp_contacts())\n conn.execute(Volgistics.insert_into_pdp_contacts())\n conn.execute(ShelterluvPeople.insert_into_pdp_contacts())\n\n\ndef name_to_array(n):\n delims = text(\"'( and | & |, | )'\")\n return func.regexp_split_to_array(\n func.lower(func.translate(n, text(\"'\\\"'\"), text(\"''\"))), delims\n )\n\n\ndef compare_names(n1, n2):\n return 
name_to_array(n1).bool_op(\"&&\")(name_to_array(n2))\n\n\ndef get_automatic_matches(conn):\n pc1 = PdpContacts.__table__.alias()\n pc2 = PdpContacts.__table__.alias()\n match_stmt = select(pc1.c._id, pc2.c._id).join(\n pc2,\n and_(\n or_(\n and_(\n compare_names(pc1.c.first_name, pc2.c.first_name),\n compare_names(pc1.c.last_name, pc2.c.last_name),\n ),\n and_(\n compare_names(pc1.c.first_name, pc2.c.last_name),\n compare_names(pc1.c.last_name, pc2.c.first_name),\n ),\n ),\n or_(\n func.lower(pc1.c.email) == func.lower(pc2.c.email),\n pc1.c.mobile == pc2.c.mobile,\n ),\n # This ensures we don't get e.g. every row matching itself\n pc1.c._id < pc2.c._id,\n ),\n )\n return conn.execute(match_stmt)\n\n\ndef get_manual_matches(conn):\n pc1 = PdpContacts.__table__.alias()\n pc2 = PdpContacts.__table__.alias()\n stmt = (\n select(pc1.c._id, pc2.c._id)\n .select_from(ManualMatches)\n .join(\n pc1,\n (ManualMatches.source_type_1 == pc1.c.source_type)\n & (ManualMatches.source_id_1 == pc1.c.source_id),\n )\n .join(\n pc2,\n (ManualMatches.source_type_2 == pc2.c.source_type)\n & (ManualMatches.source_id_2 == pc2.c.source_id),\n )\n )\n return conn.execute(stmt)\n\n\ndef update_matching_ids(match_groups, conn):\n # match_groups doesn't include singletons, but we should still each\n # unmatched record gets a sane matching_id (that is, its own id)\n matching_ids_by_id = {id: id for (id,) in conn.execute(select(PdpContacts._id))}\n for match_group in match_groups:\n matching_id = min(match_group)\n for id in match_group:\n matching_ids_by_id[id] = matching_id\n\n # Load all the new id/matching-id pairs into a temp table so that we can do\n # a fast UPDATE FROM to set all the matching ids in pdp_contacts\n temp_table = Table(\n \"_tmp_matching_id_update\",\n MetaData(), # this is a temp table, we don't want to affect our knowledge of \"real\" tables\n Column(\"_id\", Integer, primary_key=True),\n Column(\"matching_id\", Integer),\n prefixes=[\"TEMPORARY\"],\n postgresql_on_commit=\"DROP\",\n )\n temp_table.create(conn)\n conn.execute(\n insert(temp_table),\n [\n {\"_id\": _id, \"matching_id\": matching_id}\n for (_id, matching_id) in matching_ids_by_id.items()\n ],\n )\n conn.execute(\n update(PdpContacts)\n .where(PdpContacts._id == temp_table.c._id)\n .values(matching_id=temp_table.c.matching_id)\n )\n\nsrc/server/pipeline/__init__.py METASEP\n\nsrc/server/db_setup/base_users.py METASEP\nfrom config import engine\nfrom api import user_api\nimport sqlalchemy as sa\nimport os\n\nimport structlog\nlogger = structlog.get_logger()\n\n\ntry: \n from secrets_dict import BASEUSER_PW, BASEEDITOR_PW, BASEADMIN_PW\nexcept ImportError: \n # Not running locally\n logger.debug(\"Couldn't get BASE user PWs from file, trying environment **********\")\n from os import environ\n\n try:\n BASEUSER_PW = environ['BASEUSER_PW']\n BASEEDITOR_PW = environ['BASEEDITOR_PW']\n BASEADMIN_PW = environ['BASEADMIN_PW']\n\n except KeyError:\n # Nor in environment\n # You're SOL for now\n logger.error(\"Couldn't get secrets from file or environment\")\n\n\n\n\n\nfrom sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey\n\nmetadata = sa.MetaData()\n\n\ndef create_base_roles():\n with engine.connect() as connection:\n result = connection.execute(\"select role from pdp_user_roles\")\n role_count = len(result.fetchall())\n if role_count == 0:\n connection.execute(\"INSERT into pdp_user_roles values (1, 'user') \")\n connection.execute(\"INSERT into pdp_user_roles values (2, 'editor') \")\n connection.execute(\"INSERT 
into pdp_user_roles values (9, 'admin') \")\n\n else:\n logger.debug(\"%d roles already present in DB, not creating\", role_count)\n\n\ndef create_base_users(): # TODO: Just call create_user for each\n \"\"\" Creates three users (user, editor, admin) for testing\n Password for each is user name with 'pw' appended \"\"\"\n with engine.connect() as connection:\n\n result = connection.execute(\"select user from pdp_users\")\n user_count = len(result.fetchall())\n if user_count == 0:\n\n logger.debug(\"Creating base users\")\n\n pu = sa.Table(\"pdp_users\", metadata, autoload=True, autoload_with=engine)\n\n # user\n pw_hash = user_api.hash_password(BASEUSER_PW)\n ins_stmt = pu.insert().values(\n username=\"base_user\", full_name=\"Base User\", password=pw_hash, active=\"Y\", role=1,\n )\n connection.execute(ins_stmt)\n\n # INactive user\n # Reuse pw hash\n ins_stmt = pu.insert().values(\n username=\"base_user_inact\", full_name=\"Inactive User\", password=pw_hash, active=\"N\", role=1,\n )\n connection.execute(ins_stmt)\n\n # editor\n pw_hash = user_api.hash_password(BASEEDITOR_PW)\n ins_stmt = pu.insert().values(\n username=\"base_editor\", full_name=\"Base Editor\", password=pw_hash, active=\"Y\", role=2,\n )\n connection.execute(ins_stmt)\n\n # admin\n pw_hash = user_api.hash_password(BASEADMIN_PW)\n ins_stmt = pu.insert().values(\n username=\"base_admin\", full_name=\"Base Admin\", password=pw_hash, active=\"Y\", role=9,\n )\n connection.execute(ins_stmt)\n\n else:\n logger.debug(\"%d users already present in DB, not creating\", user_count)\n\n\ndef populate_rfm_mapping_table(overwrite=False):\n \"\"\"Populate the rfm_mapping table if empty or overwrite is True.\"\"\"\n\n with engine.connect() as connection:\n\n def table_empty():\n result = connection.execute(\"select count(*) from rfm_mapping;\")\n row_count = result.fetchone()[0]\n return row_count == 0\n\n\n if overwrite or table_empty():\n logger.debug(\"Populating rfm_mapping table\")\n\n if not table_empty():\n logger.debug(\"'overwrite=True', truncating rfm_mapping table\")\n connection.execute(\"TRUNCATE TABLE rfm_mapping;\")\n\n\n if os.path.exists('server'): # running locally\n file_path = os.path.normpath('server/alembic/populate_rfm_mapping.sql')\n\n elif os.path.exists('alembic'): # running on Docker\n file_path = os.path.normpath('alembic/populate_rfm_mapping.sql')\n\n else: #\n logger.error(\"ERROR: Can't find a path to populate script!!!!!! 
CWD is %s\", os.getcwd())\n return\n\n\n\n logger.debug(\"Loading sql script at \" + file_path)\n\n f = open(file_path)\n populate_query = f.read()\n f.close()\n\n result = connection.execute(populate_query)\n\n if table_empty():\n logger.error(\"ERROR: rfm_mapping table WAS NOT POPULATED\")\n\n else:\n logger.debug(\"rfm_mapping table already populated; overwrite not True so not changing.\")\n\n return\n\n\ndef populate_sl_event_types():\n \"\"\"If not present, insert values for shelterluv animal event types.\"\"\"\n with engine.connect() as connection:\n result = connection.execute(\"select id from sl_event_types\")\n type_count = len(result.fetchall())\n if type_count == 0:\n print(\"Inserting SL event types\")\n connection.execute(\"\"\"INSERT into sl_event_types values \n (1, 'Outcome.Adoption'),\n (2, 'Outcome.Foster'),\n (3, 'Outcome.ReturnToOwner'),\n (4, 'Intake.AdoptionReturn'),\n (5, 'Intake.FosterReturn'); \"\"\") \n else:\n logger.debug(\"%d event types already present in DB, not creating\", type_count)\n\nsrc/server/db_setup/__init__.py METASEP\n\nsrc/server/api/user_api.py METASEP\nfrom hashlib import pbkdf2_hmac\nfrom os import urandom, environ\nimport pytest, codecs, random\nfrom datetime import datetime\n\nfrom api.api import user_api\nfrom sqlalchemy.sql import text\nfrom config import engine\nfrom flask import request, redirect, jsonify, current_app, abort, json\n\nfrom sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, exc, select\n\nfrom api import jwt_ops\n\nimport structlog\nlogger = structlog.get_logger()\n\n\nmetadata = MetaData()\n\n# Salt for hashing storing passwords\nSALT_LENGTH = 32\n\n\n# Keep a journal of user activity\ndef log_user_action(user, event_class, detail):\n \"\"\" Write log entry to db \"\"\"\n\n puj = Table(\"pdp_user_journal\", metadata, autoload=True, autoload_with=engine)\n\n with engine.connect() as connection:\n ins_stmt = puj.insert().values(username=user, event_type=event_class, detail=detail)\n\n try:\n connection.execute(ins_stmt)\n except Exception as e:\n logger.error(e)\n\ndef password_is_strong(password):\n \"\"\" Check plain-text password against strength rules.\"\"\"\n\n def has_digit(test_string):\n \"\"\"Test if any character is a digit.\"\"\"\n for c in test_string:\n if c.isdigit():\n return True\n return False\n\n def has_alpha(test_string):\n \"\"\"Test if any character is alphabetic.\"\"\"\n for c in test_string:\n if c.isalpha():\n return True\n return False\n\n if (len(password) > 11 \n # and has_alpha(password) \n # and has_digit(password)\n ):\n return True\n \n else:\n return False\n\n\ndef hash_password(password):\n \"\"\" Generate salt+hash for storing in db\"\"\"\n salt = urandom(SALT_LENGTH)\n hash = pbkdf2_hmac(\"sha512\", bytes(password, \"utf8\"), salt, 500000)\n hash_for_db = salt + hash\n return hash_for_db\n\n\ndef check_password(password, salty_hash):\n \"\"\"Check presented cleartext password against DB-type salt+hash, return True if they match\"\"\"\n salt = salty_hash[0:SALT_LENGTH]\n hash = salty_hash[SALT_LENGTH:]\n # Use salt from db to hash what user gave us\n pw_bytes = bytes(password, \"utf8\")\n hash_of_presented = pbkdf2_hmac(\"sha512\", pw_bytes, salt, 500000)\n return hash.hex() == hash_of_presented.hex()\n\n\n### No authorization required ############################\n\n\n@user_api.route(\"/api/user/test\", methods=[\"GET\"])\ndef user_test():\n \"\"\" Liveness test, does not require JWT \"\"\"\n logger.debug(\"/api/user/test\")\n return jsonify((\"OK from User Test @ \" + 
str(datetime.now())))\n\n\n@user_api.route(\"/api/user/test_log\", methods=[\"GET\"])\ndef user_test_log_error():\n \"\"\"Does not require JWT - see various log levels\"\"\"\n\n logger.debug(\"debug: /api/user/test_log_error\") \n logger.info(\"info: /api/user/test_log_error\") \n logger.warn(\"warn: /api/user/test_log_error\")\n logger.error(\"error: /api/user/test_log_error\")\n logger.critical(\"critical: /api/user/test_log_error\")\n return jsonify((\"Generated log entries as various levals @ \" + str(datetime.now())))\n\n\n\n\n@user_api.route(\"/api/user/test_fail\", methods=[\"GET\"])\ndef user_test_fail():\n \"\"\" Liveness test, always fails with 401\"\"\"\n return jsonify(\"Here's your failure\"), 401\n\n\n@user_api.route(\"/api/user/timeout/\", methods=[\"GET\"])\ndef user_override_timeout(new_timeout):\n \"\"\" Override JWT expiration setting for testing.\n Allows a value up to JWT_MAX_TIMEOUT (from app.py).\n This will affect, of course, only future tokens.\n \"\"\" \n if (new_timeout > current_app.config[\"JWT_MAX_TIMEOUT\"] ) : \n new_timeout = current_app.config[\"JWT_MAX_TIMEOUT\"]\n current_app.config[\"JWT_ACCESS_TOKEN_EXPIRES\"] = new_timeout\n return jsonify(\"Timeout set to \" + str(new_timeout) + \" seconds\"), 200\n\n\n@user_api.route(\"/api/user/login\", methods=[\"POST\"])\ndef user_login():\n \"\"\" Validate user in db, return JWT if legit and active.\n Expects json-encoded form data {\"username\" :, \"password\": }\n \"\"\"\n\n def dummy_check():\n \"\"\"Perform a fake password hash check to take as much time as a real one.\"\"\"\n pw_bytes = bytes('password', \"utf8\")\n check_password('password', pw_bytes)\n\n try:\n post_dict = json.loads(request.data)\n username = post_dict[\"username\"]\n presentedpw = post_dict[\"password\"]\n except:\n dummy_check() # Take the same time as with well-formed requests \n return jsonify(\"Bad credentials\"), 401\n\n if not (isinstance(username, str) and isinstance(presentedpw, str) ):\n dummy_check() # Take the same time as with well-formed requests \n return jsonify(\"Bad credentials\"), 401 # Don't give us ints, arrays, etc.\n\n\n with engine.connect() as connection:\n\n pwhash = None\n s = text(\n \"\"\"select password, pdp_user_roles.role, active \n from pdp_users \n left join pdp_user_roles on pdp_users.role = pdp_user_roles._id \n where username=:u \"\"\"\n )\n s = s.bindparams(u=username)\n result = connection.execute(s)\n\n if result.rowcount: # Did we get a match on username?\n pwhash, role, is_active = result.fetchone()\n else:\n log_user_action(username, \"Failure\", \"Invalid username\")\n dummy_check()\n return jsonify(\"Bad credentials\"), 401\n\n if is_active.lower() == \"y\" and check_password(presentedpw, pwhash):\n # Yes, user is active and password matches\n token = jwt_ops.create_token(username, role)\n log_user_action(username, \"Success\", \"Logged in\")\n return token\n\n else:\n log_user_action(username, \"Failure\", \"Bad password or inactive\")\n # No dummy_check needed as we ran a real one to get here\n return jsonify(\"Bad credentials\"), 401\n\n\n### Unexpired JWT required ############################\n\n\n@user_api.route(\"/api/user/test_auth\", methods=[\"GET\"])\n@jwt_ops.jwt_required()\ndef user_test_auth():\n \"\"\" Liveness test, requires JWT \"\"\"\n sysname = '?' 
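\n # 'computername' is usually present on Windows hosts and 'HOSTNAME' in Linux/container environments; whichever lookup succeeds below overwrites the '?' placeholder.\n 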
# Ensure we are talking to the expected host\n try:\n sysname = environ['computername'] \n except:\n pass\n \n try:\n sysname = environ['HOSTNAME']\n except:\n pass\n\n\n return jsonify((\"OK from User Test - Auth [\" + sysname + \"] @\" + str(datetime.now())))\n\n\n# Logout is not strictly needed; client can just delete JWT, but good for logging\n@user_api.route(\"/api/user/logout\", methods=[\"POST\"])\n@jwt_ops.jwt_required()\ndef user_logout():\n \n user_name = ''\n\n old_jwt = jwt_ops.validate_decode_jwt() \n\n # If token bad, should be handled & error message sent by jwt_required() and we won't get here\n if old_jwt:\n user_name = old_jwt['sub']\n \n # Log the request\n log_user_action(user_name, \"Success\", \"Logged out\")\n return jsonify(\"Logged out\")\n\n\n# Generate a new access token \n\n@user_api.route(\"/api/user/refresh\", methods=[\"GET\"])\n@jwt_ops.jwt_required()\ndef user_refresh():\n \"\"\" If user still active, send back an access_token with a new expiration stamp \"\"\"\n old_jwt = jwt_ops.validate_decode_jwt() \n\n # If token bad, should be handled & error message sent by jwt_required() and we won't get here\n if old_jwt:\n user_name = old_jwt['sub']\n with engine.connect() as connection:\n\n s = text( \"\"\"select active from pdp_users where username=:u \"\"\" )\n s = s.bindparams(u=user_name)\n result = connection.execute(s)\n\n if result.rowcount: # Did we get a match on username?\n is_active = result.fetchone()\n else:\n log_user_action(user_name, \"Failure\", \"Valid JWT presented for refesh attempt on unknown username\")\n return jsonify(\"Bad credentials\"), 401\n\n if is_active[0].lower() == 'y': # In the user DB and still Active?\n token = jwt_ops.create_token(user_name,old_jwt['role'])\n log_user_action(user_name, \"Success\", \"Refreshed token\")\n return token\n\n else:\n return jsonify(\"Bad credentials\"), 401\n\n\n\n### Unexpired *Admin* JWT required ############################\n\n\n@user_api.route(\"/api/admin/user/create\", methods=[\"POST\"])\n@jwt_ops.admin_required\ndef user_create():\n \"\"\"Create user record \n \n Requires admin role \n\n Form POST JSON Parameters \n ----------\n username : str \n full_name : str \n password : str \n role : str, one of `user`, `editor`, `admin` \n\n Returns \n ----------\n User created: 201 + username \n Invalid role: 422 + \"Bad role\" \n Duplicate user: 409 + DB error \n\n \"\"\"\n\n try:\n post_dict = json.loads(request.data)\n new_user = post_dict[\"username\"]\n fullname = post_dict[\"full_name\"]\n userpw = post_dict[\"password\"]\n user_role = post_dict[\"role\"]\n except:\n return jsonify(\"Missing one or more parameters\"), 400\n\n\n requesting_user = jwt_ops.validate_decode_jwt()['sub'] \n\n pw_hash = hash_password(userpw)\n\n pu = Table(\"pdp_users\", metadata, autoload=True, autoload_with=engine)\n pr = Table(\"pdp_user_roles\", metadata, autoload=True, autoload_with=engine)\n\n with engine.connect() as connection:\n\n # Build dict of roles\n role_dict = {}\n r = select((pr.c.role, pr.c._id))\n rr = connection.execute(r)\n fa = rr.fetchall()\n for row in fa:\n role_dict[row[0]] = row[1] # TODO: possible to do directly in sa?\n\n try:\n role_val = role_dict[user_role]\n except KeyError as e:\n logger.error(\"Role not found %s\", e)\n log_user_action(\n requesting_user,\n \"Failure\",\n \"Bad role (\" + user_role + \") in user_create for \" + new_user,\n )\n return jsonify(\"Bad role\"), 422\n\n ins_stmt = pu.insert().values(\n # _id=default,\n username=new_user,\n password=pw_hash,\n 
full_name=fullname,\n active=\"Y\",\n role=role_val,\n )\n\n try:\n connection.execute(ins_stmt)\n except exc.IntegrityError as e: # Uniqueness violation\n return jsonify(e.orig.pgerror), 409\n\n # if created, 201\n log_user_action(\n requesting_user, \"Success\", \"Created user \" + new_user + \" with role: \" + user_role,\n )\n return jsonify(new_user), 201\n\n\n@user_api.route(\"/api/admin/user/get_user_count\", methods=[\"GET\"])\n@jwt_ops.admin_required\ndef get_user_count():\n \"\"\"Return number of records in pdp_users table \"\"\"\n with engine.connect() as connection:\n s = text(\"select count(user) from pdp_users;\")\n result = connection.execute(s)\n user_count = result.fetchone()\n return jsonify(user_count[0])\n\n\n@user_api.route(\"/api/admin/user/check_name\", methods=[\"POST\"])\n@jwt_ops.admin_required\ndef check_username():\n \"\"\"Return 1 if username exists already, else 0.\"\"\"\n\n try:\n post_dict = json.loads(request.data)\n test_username = post_dict[\"username\"]\n except:\n return jsonify(\"Missing username\"), 400\n\n with engine.connect() as connection:\n\n s = text( \"\"\"select count(username) from pdp_users where username=:u \"\"\" )\n s = s.bindparams(u=test_username)\n result = connection.execute(s)\n\n if result.rowcount: # As we're doing a count() we *should* get a result\n user_exists = result.fetchone()[0]\n else:\n log_user_action(test_username, \"Failure\", \"Error when checking username\")\n return jsonify(\"Error checking username\"), 500\n\n return jsonify(user_exists)\n\n@user_api.route(\"/api/admin/user/update\", methods=[\"POST\"])\n@jwt_ops.admin_required \ndef user_update():\n \"\"\"Update existing user record \n \"\"\"\n \n \n try:\n post_dict = json.loads(request.data)\n username = post_dict[\"username\"]\n except:\n return jsonify(\"Must specify username\"), 400\n\n update_dict = {}\n\n # Need to be a bit defensive here & select what we want instead of taking what we're given\n for key in [\"full_name\", \"active\", \"role\", \"password\"]: \n try:\n val = post_dict[key]\n update_dict[key] = val\n except:\n pass\n\n \n if not update_dict:\n logger.debug(\"Update called with nothing to update\")\n return jsonify(\"No changed items specified\") # If nothing to do, declare victory\n\n if \"password\" in update_dict.keys():\n\n if password_is_strong(update_dict['password']):\n update_dict['password'] = hash_password(update_dict['password'])\n else:\n return jsonify(\"Password too weak\") \n\n\n # We have a variable number of columns to update.\n # We could generate a text query on the fly, but this seems the perfect place to use the ORM \n # and let it handle the update for us. 
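A single ORM update() built from the\n # user-supplied update_dict covers any combination of columns without hand-assembling SQL.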
\n\n from sqlalchemy import update\n from sqlalchemy.orm import Session, sessionmaker\n\n Session = sessionmaker(engine)\n\n session = Session() \n # #TODO: Figure out why context manager doesn't work or do try/finally\n\n pr = Table(\"pdp_user_roles\", metadata, autoload=True, autoload_with=engine)\n\n if (\"role\" in update_dict.keys()): # We are changing the role\n\n # Build dict of roles {name: id}\n role_dict = {}\n r = select((pr.c.role, pr.c._id))\n rr = session.execute(r)\n fa = rr.fetchall()\n for row in fa:\n role_dict[row[0]] = row[1] \n\n logger.debug(\"Found %d roles\", len(role_dict))\n # Replace the role name with the corresponding id for update\n try:\n # We could verify that the role is actually different - doesn't seem worth the effort\n update_dict[\"role\"] = role_dict[update_dict[\"role\"]]\n except KeyError:\n logger.error(\"Attempted to change user '%s' to invalid role '%s'\", username, update_dict[\"role\"])\n session.close()\n return jsonify(\"Invalid role specified\"), 400\n\n PU = Table(\"pdp_users\", metadata, autoload=True, autoload_with=engine)\n\n stmt = update(PU).where(PU.columns.username == username).values(update_dict).\\\n execution_options(synchronize_session=\"fetch\")\n\n result = session.execute(stmt)\n\n session.commit()\n session.close()\n\n return jsonify(\"Updated\")\n\n\n@user_api.route(\"/api/admin/user/get_users\", methods=[\"GET\"])\n@jwt_ops.admin_required\ndef user_get_list():\n \"\"\"Return list of users\"\"\"\n\n with engine.connect() as connection:\n\n s = text(\n \"\"\" select username, full_name, active, pr.role\n from pdp_users as pu \n left join pdp_user_roles as pr on pu.role = pr._id\n order by username \"\"\"\n )\n result = connection.execute(s)\n\n query_result_json = [dict(row) for row in result]\n\n return jsonify(query_result_json), 200\n\n@user_api.route(\"/api/admin/user/get_info/\", methods=[\"GET\"])\n@jwt_ops.admin_required \ndef user_get_info(username):\n \"\"\"Return info on a specified user\"\"\"\n\n with engine.connect() as connection:\n\n s = text(\n \"\"\" select username, full_name, active, pr.role\n from pdp_users as pu \n left join pdp_user_roles as pr on pu.role = pr._id\n where username=:u\n \"\"\"\n )\n s = s.bindparams(u=username)\n result = connection.execute(s)\n \n if result.rowcount: \n user_row = result.fetchone()\n else:\n log_user_action(username, \"Failure\", \"Error when getting user info\")\n return jsonify(\"Username not found\"), 400\n\n return jsonify( dict(zip(result.keys(), user_row)) ), 200\nsrc/server/api/jwt_ops.py METASEP\nfrom functools import wraps\nfrom flask import Flask, jsonify, request, current_app\nfrom flask_jwt_extended import (\n JWTManager,\n jwt_required,\n create_access_token,\n get_jwt_identity,\n verify_jwt_in_request,\n get_jwt\n \n)\n\nfrom app import app, jwt\n\n# Wraps funcs to require admin role to execute\ndef admin_required(fn):\n @wraps(fn)\n def wrapper(*args, **kwargs):\n verify_jwt_in_request()\n claims = get_jwt()\n if claims[\"role\"] != \"admin\": \n return jsonify(msg=\"Admins only!\"), 403\n else:\n return fn(*args, **kwargs)\n\n return wrapper\n\ndef create_token(username, accesslevel):\n \"\"\" Create a JWT *access* token for the specified user ('sub:') and role ('role:'). 
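\n\n Illustrative use (example values only): create_token('jdoe', 'admin') returns a Flask\n JSON response of the form {'access_token': '...'}.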
\n \"\"\"\n # Identity can be any data that is json serializable, we just use username\n addl_claims = {'role': accesslevel}\n new_token = create_access_token(identity=username, additional_claims=addl_claims)\n return jsonify(access_token=new_token)\n\n\ndef validate_decode_jwt():\n \"\"\" If valid, return jwt fields as a dictionary, else None \"\"\"\n jwtdict = None\n try:\n jwtdict = verify_jwt_in_request()[1]\n except:\n pass # Wasn't valid - either expired or failed validation \n\n return jwtdict\n\nsrc/server/api/internal_api.py METASEP\nfrom datetime import datetime\n\nimport structlog\nfrom flask import jsonify\n\nfrom api.API_ingest import ingest_sources_from_api, salesforce_contacts\nfrom api.api import internal_api\nfrom rfm_funcs.create_scores import create_scores\nfrom api.API_ingest import updated_data\n\nlogger = structlog.get_logger()\n\n### Internal API endpoints can only be accessed from inside the cluster;\n### they are blocked by location rule in NGINX config\n\n\n# Verify that this can only be accessed from within cluster\n@internal_api.route(\"/api/internal/test\", methods=[\"GET\"])\ndef user_test():\n \"\"\" Liveness test, does not require JWT \"\"\"\n return jsonify((\"OK from INTERNAL Test @ \" + str(datetime.now())))\n\n\n@internal_api.route(\"/api/internal/test/test\", methods=[\"GET\"])\ndef user_test2():\n \"\"\" Liveness test, does not require JWT \"\"\"\n return jsonify((\"OK from INTERNAL test/test @ \" + str(datetime.now())))\n\n\n@internal_api.route(\"/api/internal/ingestRawData\", methods=[\"GET\"])\ndef ingest_raw_data():\n try:\n ingest_sources_from_api.start()\n except Exception as e:\n logger.error(e)\n\n return jsonify({'outcome': 'OK'}), 200\n\n\n@internal_api.route(\"/api/internal/create_scores\", methods=[\"GET\"])\ndef hit_create_scores():\n logger.info(\"Hitting create_scores() \")\n tuple_count = create_scores()\n logger.info(\"create_scores() processed %s scores\", str(tuple_count) )\n return jsonify(200)\n\n\n@internal_api.route(\"/api/internal/get_updated_data\", methods=[\"GET\"])\ndef get_contact_data():\n logger.debug(\"Calling get_updated_contact_data()\")\n contact_json = updated_data.get_updated_contact_data()\n logger.debug(\"Returning %d contact records\", len(contact_json) )\n return jsonify(contact_json), 200\n\nsrc/server/api/file_uploader.py METASEP\nimport pandas as pd\nfrom config import engine\nfrom donations_importer import validate_import_sfd\nfrom flask import current_app\nfrom models import ManualMatches, SalesForceContacts, ShelterluvPeople, Volgistics\nfrom shifts_importer import validate_import_vs\nfrom werkzeug.utils import secure_filename\n\nimport structlog\nlogger = structlog.get_logger()\n\nSUCCESS_MSG = \"Uploaded Successfully!\"\n\n\ndef validate_and_arrange_upload(file):\n logger.info(\"Start uploading file: %s \", file.filename)\n filename = secure_filename(file.filename)\n file_extension = filename.rpartition(\".\")[2]\n with engine.begin() as conn:\n determine_upload_type(file, file_extension, conn)\n\n\ndef determine_upload_type(file, file_extension, conn):\n # Yes, this method of discovering what kind of file we have by looking at\n # the extension and columns is silly. 
We'd like to get more of our data from\n # automatically pulling from vendor APIs directly, in which case we'd know\n # what kind of data we had.\n if file_extension == \"csv\":\n logger.debug(\"File extension is CSV\")\n df = pd.read_csv(file, dtype=\"string\")\n\n if {\"salesforcecontacts\", \"volgistics\", \"shelterluvpeople\"}.issubset(df.columns):\n logger.debug(\"File appears to be salesforcecontacts, volgistics, or shelterluvpeople (manual)\")\n ManualMatches.insert_from_df(df, conn)\n return\n elif {\"Animal_ids\", \"Internal-ID\"}.issubset(df.columns):\n logger.debug(\"File appears to be shelterluvpeople\")\n ShelterluvPeople.insert_from_df(df, conn)\n return\n\n if file_extension == \"xlsx\":\n excel_file = pd.ExcelFile(file)\n if {\"Master\", \"Service\"}.issubset(excel_file.sheet_names):\n logger.debug(\"File appears to be Volgistics\")\n # Volgistics\n validate_import_vs(file, conn)\n Volgistics.insert_from_file(excel_file, conn)\n return\n\n df = pd.read_excel(excel_file)\n if \"Contact ID 18\" in df.columns:\n # Salesforce something-or-other\n if \"Amount\" in df.columns:\n # Salesforce donations\n logger.debug(\"File appears to be Salesforce donations\")\n validate_import_sfd(file, conn)\n return\n else:\n # Salesforce contacts\n logger.debug(\"File appears to be Salesforce contacts\")\n SalesForceContacts.insert_from_file_df(df, conn)\n return\n\n logger.error(\"Don't know how to process file: %s\", file.filename)\nsrc/server/api/fake_data.py METASEP\n\"\"\" Fake data that can be returned when an API token is missing for local\n development, or for running pytest\n\n Shelterluv Data contains:\n Matched: Animal & Event End point\n\"\"\"\n\nshelterluv_data = {\n 'animals': {\n \"animal_details\": {\n '12345': {\n \"Age\": 24,\n \"DOBUnixTime\": 1568480456,\n \"Name\": \"Lola aka Fake Cat\",\n \"Type\": \"Cat\",\n \"Photos\":\n [\"https://images.unsplash.com/photo-1456926631375-92c8ce872def?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8OHx8YW5pbWFsfGVufDB8fDB8fA%3D%3D&w=1000&q=80\"],\n \"Status\": \"Healthy In Home\",\n },\n },\n \"person_details\": {\n \"shelterluv_short_id\": 2,\n },\n },\n 'events': {\n '12345':[\n {\n 'AssociatedRecords': [\n {'Id': 12345, 'Type': 'Animal' },\n {'Id': 12345, 'Type': 'Person'},\n ],\n 'Subtype': 'Foster Home',\n 'Time': '1602694822',\n 'Type': 'Outcome.Adoption',\n 'User': 'Fake User',\n },\n ]\n },\n}\n\n\ndef sl_mock_data(end_point: str)-> dict:\n \"\"\" Shelterluv mock data.\n Takes the end_point as a str of `animals` or `events` and returns\n a dict representing a test data for that end_point.\n \"\"\"\n\n return shelterluv_data.get(end_point)\n\nsrc/server/api/common_api.py METASEP\nfrom api.api import common_api\nfrom config import engine\nfrom flask import jsonify , current_app\nfrom sqlalchemy.sql import text\nimport requests\nimport time\nfrom datetime import datetime\n\nimport structlog\nlogger = structlog.get_logger()\n\n\nfrom api.fake_data import sl_mock_data\n\ntry:\n from secrets_dict import SHELTERLUV_SECRET_TOKEN\nexcept ImportError:\n # Not running locally\n logger.debug(\"Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********\")\n from os import getenv\n\n SHELTERLUV_SECRET_TOKEN = getenv('SHELTERLUV_SECRET_TOKEN')\n if not SHELTERLUV_SECRET_TOKEN:\n logger.warn(\"Couldn't get secrets from file or environment - defaulting to Fake Data\")\n\nfrom api import jwt_ops\n\n@common_api.route('/api/timeout_test/', methods=['GET'])\ndef get_timeout(duration):\n start = datetime.now().strftime(\"%H:%M:%S\");\n 
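# Block for the requested number of seconds so callers can exercise client-side timeout handling.\n 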
time.sleep(int(duration))\n\n stop = datetime.now().strftime(\"%H:%M:%S\");\n results = jsonify({'result': 'success', 'duration': duration, 'start': start, 'stop': stop})\n\n return results\n\n@common_api.route('/api/contacts/', methods=['GET'])\n@jwt_ops.jwt_required()\ndef get_contacts(search_text):\n with engine.connect() as connection:\n search_text = search_text.lower()\n\n names = search_text.split(\" \")\n if len(names) == 2:\n query = text(\"\"\"select pdp_contacts.*, rfm_scores.rfm_score, rfm_label, rfm_color, rfm_text_color\n from pdp_contacts \n left join rfm_scores on rfm_scores.matching_id = pdp_contacts.matching_id\n left join rfm_mapping on rfm_mapping.rfm_value = rfm_scores.rfm_score\n where archived_date is null AND ( (lower(first_name) like lower(:name1) and lower(last_name) like lower(:name2)) \n OR (lower(first_name) like lower(:name2) and lower(last_name) like lower(:name1)) )\n order by lower(last_name), lower(first_name)\"\"\")\n query_result = connection.execute(query, name1='{}%'.format(names[0]), name2='{}%'.format(names[1]))\n elif len(names) == 1:\n query = text(\"\"\"select pdp_contacts.*, rfm_scores.rfm_score, rfm_label, rfm_color, rfm_text_color\n from pdp_contacts \n left join rfm_scores on rfm_scores.matching_id = pdp_contacts.matching_id\n left join rfm_mapping on rfm_mapping.rfm_value = rfm_scores.rfm_score\n where archived_date is null AND ( lower(first_name) like lower(:search_text) \n OR lower(last_name) like lower(:search_text) )\n order by lower(last_name), lower(first_name)\"\"\")\n query_result = connection.execute(query, search_text='{}%'.format(search_text))\n\n query_result_json = [dict(row) for row in query_result]\n\n results = jsonify({'result': query_result_json})\n\n return results\n\n\n@common_api.route('/api/rfm/