\\n \\n\", \"diff --git a/docs/learn/101-use.md b/docs/learn/101-use.md\\nindex 283c1c1..2ec10f9 100644\\n--- a/docs/learn/101-use.md\\n+++ b/docs/learn/101-use.md\\n@@ -41,8 +41,7 @@ cd ./examples/todoapp\\n The example app contains encrypted secrets and other pre-configured inputs, here is how to decrypt them:\\n \\n ```sh\\n-curl -sfL https://releases.dagger.io/examples/key.txt >> ~/.config/dagger/keys.txt\\n-dagger input list\\n+dagger input list || curl -sfL https://releases.dagger.io/examples/key.txt >> ~/.config/dagger/keys.txt\\n ```\\n \\n **Step 4**: Deploy!\\n\", \"diff --git a/src/burnchains/burnchain.rs b/src/burnchains/burnchain.rs\\nindex 92105d6..60c608a 100644\\n--- a/src/burnchains/burnchain.rs\\n+++ b/src/burnchains/burnchain.rs\\n@@ -851,8 +851,26 @@ impl Burnchain {\\n );\\n \\n burnchain_db.store_new_burnchain_block(burnchain, indexer, &block)?;\\n- let block_height = block.block_height();\\n+ Burnchain::process_affirmation_maps(\\n+ burnchain,\\n+ burnchain_db,\\n+ indexer,\\n+ block.block_height(),\\n+ )?;\\n+\\n+ let header = block.header();\\n+ Ok(header)\\n+ }\\n \\n+ /// Update the affirmation maps for the previous reward cycle's commits.\\n+ /// This is a no-op unless the given burnchain block height falls on a reward cycle boundary. In that\\n+ /// case, the previous reward cycle's block commits' affirmation maps are all re-calculated.\\n+ pub fn process_affirmation_maps<B: BurnchainHeaderReader>(\\n+ burnchain: &Burnchain,\\n+ burnchain_db: &mut BurnchainDB,\\n+ indexer: &B,\\n+ block_height: u64,\\n+ ) -> Result<(), burnchain_error> {\\n let this_reward_cycle = burnchain\\n .block_height_to_reward_cycle(block_height)\\n .unwrap_or(0);\\n@@ -872,10 +890,7 @@ impl Burnchain {\\n );\\n update_pox_affirmation_maps(burnchain_db, indexer, prev_reward_cycle, burnchain)?;\\n }\\n-\\n- let header = block.header();\\n-\\n- Ok(header)\\n+ Ok(())\\n }\\n \\n /// Hand off the block to the ChainsCoordinator _and_ process the sortition\\n\", \"diff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\\nindex da05e13..9231df3 100644\\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\\n@@ -154,6 +154,9 @@ public final class EventAppliers implements EventApplier {\\n register(\\n ProcessInstanceIntent.SEQUENCE_FLOW_TAKEN,\\n new ProcessInstanceSequenceFlowTakenApplier(elementInstanceState, processState));\\n+ register(\\n+ ProcessInstanceIntent.ELEMENT_MIGRATED,\\n+ new ProcessInstanceElementMigratedApplier(elementInstanceState));\\n }\\n \\n private void registerProcessInstanceCreationAppliers(final MutableProcessingState state) {\\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\\nindex e5a0f3a..d38358f 100644\\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\\n@@ -24,5 +24,16 @@ final class 
ProcessInstanceElementMigratedApplier\\n }\\n \\n @Override\\n- public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {}\\n+ public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {\\n+ elementInstanceState.updateInstance(\\n+ elementInstanceKey,\\n+ elementInstance ->\\n+ elementInstance\\n+ .getValue()\\n+ .setProcessDefinitionKey(value.getProcessDefinitionKey())\\n+ .setBpmnProcessId(value.getBpmnProcessId())\\n+ .setVersion(value.getVersion())\\n+ .setElementId(value.getElementId())\\n+ .setFlowScopeKey(value.getFlowScopeKey()));\\n+ }\\n }\\n\", \"diff --git a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\\nindex 69b06b6..a4fcb77 100644\\n--- a/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\\n+++ b/restore/src/main/java/io/camunda/zeebe/restore/PartitionRestoreService.java\\n@@ -112,7 +112,7 @@ public class PartitionRestoreService {\\n SegmentedJournal.builder()\\n .withDirectory(dataDirectory.toFile())\\n .withName(partition.name())\\n- .withLastWrittenIndex(-1)\\n+ .withLastFlushedIndex(-1)\\n .build()) {\\n \\n resetJournal(checkpointPosition, journal);\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"c3b5dc77ff3d89d389f6f3a868b17d0a8ca63074\", \"2b01808ec86fe9d8b4a93141a1b7f95e11fd6010\", \"d7972da833257c073403dec3c2ac3a7f297e328a\", \"39d5d1cfe8d2210305df2c8fab4a4ae430732cf7\", \"5ffc5794808647de14f945141692be26ad143006\"]"},"types":{"kind":"string","value":"[\"test\", \"docs\", \"refactor\", \"feat\", \"fix\"]"}}},{"rowIdx":1368,"cells":{"commit_message":{"kind":"string","value":"run nix macos jobs on macos-13 to try and avoid SIP,reorder startup steps,skip if related view/hook/column of a filter is not found\n\nSigned-off-by: Pranav C ,brew tests/multiple darwin builds/gh enterprise,add --ignore-existing to all npx commands"},"diff":{"kind":"string","value":"[\"diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml\\nnew file mode 100644\\nindex 0000000..5be7d17\\n--- /dev/null\\n+++ b/.github/actionlint.yaml\\n@@ -0,0 +1,7 @@\\n+self-hosted-runner:\\n+ # Labels of self-hosted runner in array of strings.\\n+ labels: [macos-13]\\n+# Configuration variables in array of strings defined in your repository or\\n+# organization. 
`null` means disabling configuration variables check.\\n+# Empty array means no configuration variable is allowed.\\n+config-variables: null\\ndiff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml\\nindex e37346c..dce77e1 100644\\n--- a/.github/workflows/nix.yml\\n+++ b/.github/workflows/nix.yml\\n@@ -37,7 +37,7 @@ jobs:\\n - \\\"3.10\\\"\\n - \\\"3.11\\\"\\n include:\\n- - os: macos-latest\\n+ - os: macos-13\\n python-version: \\\"3.10\\\"\\n steps:\\n - name: checkout\\ndiff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\\nindex 005a850..8db22e2 100644\\n--- a/.pre-commit-config.yaml\\n+++ b/.pre-commit-config.yaml\\n@@ -3,7 +3,7 @@ ci:\\n autofix_prs: false\\n autoupdate_commit_msg: \\\"chore(deps): pre-commit.ci autoupdate\\\"\\n skip:\\n- - actionlint\\n+ - actionlint-system\\n - deadnix\\n - just\\n - nixpkgs-fmt\\n@@ -17,9 +17,9 @@ default_stages:\\n - commit\\n repos:\\n - repo: https://github.com/rhysd/actionlint\\n- rev: v1.6.24\\n+ rev: v1.6.25\\n hooks:\\n- - id: actionlint\\n+ - id: actionlint-system\\n - repo: https://github.com/psf/black\\n rev: 23.3.0\\n hooks:\\n@@ -30,7 +30,7 @@ repos:\\n - id: nbstripout\\n exclude: .+/rendered/.+\\n - repo: https://github.com/codespell-project/codespell\\n- rev: v2.2.4\\n+ rev: v2.2.5\\n hooks:\\n - id: codespell\\n additional_dependencies:\\n\", \"diff --git a/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java b/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\\nindex 52fa3a9..d81c27a 100644\\n--- a/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/bootstrap/BrokerStartupProcess.java\\n@@ -50,21 +50,20 @@ public final class BrokerStartupProcess {\\n // must be executed before any disk space usage listeners are registered\\n result.add(new DiskSpaceUsageMonitorStep());\\n }\\n-\\n result.add(new MonitoringServerStep());\\n result.add(new BrokerAdminServiceStep());\\n+\\n result.add(new ClusterServicesCreationStep());\\n+ result.add(new ClusterServicesStep());\\n \\n result.add(new CommandApiServiceStep());\\n result.add(new SubscriptionApiStep());\\n-\\n- result.add(new ClusterServicesStep());\\n+ result.add(new LeaderManagementRequestHandlerStep());\\n \\n if (config.getGateway().isEnable()) {\\n result.add(new EmbeddedGatewayServiceStep());\\n }\\n \\n- result.add(new LeaderManagementRequestHandlerStep());\\n result.add(new PartitionManagerStep());\\n \\n return result;\\n\", \"diff --git a/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts b/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\\nindex 1515f88..6c250bd 100644\\n--- a/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\\n+++ b/packages/nocodb/src/lib/version-upgrader/ncFilterUpgrader.ts\\n@@ -21,7 +21,13 @@ export default async function ({ ncMeta }: NcUpgraderCtx) {\\n } else {\\n continue;\\n }\\n- if (filter.project_id != model.project_id) {\\n+\\n+ // skip if related model is not found\\n+ if (!model) {\\n+ continue;\\n+ }\\n+\\n+ if (filter.project_id !== model.project_id) {\\n await ncMeta.metaUpdate(\\n null,\\n null,\\n\", \"diff --git a/pipeline/brew/brew.go b/pipeline/brew/brew.go\\nindex ec27182..15ed189 100644\\n--- a/pipeline/brew/brew.go\\n+++ b/pipeline/brew/brew.go\\n@@ -1,5 +1,3 @@\\n-// Package brew implements the Pipe, providing formula generation and\\n-// uploading it to a configured repo.\\n package brew\\n \\n import (\\n@@ -10,13 +8,12 @@ import (\\n 
\\t\\\"strings\\\"\\n \\t\\\"text/template\\\"\\n \\n-\\t\\\"github.com/goreleaser/goreleaser/internal/artifact\\\"\\n-\\n \\t\\\"github.com/apex/log\\\"\\n \\n \\t\\\"github.com/goreleaser/goreleaser/checksum\\\"\\n \\t\\\"github.com/goreleaser/goreleaser/config\\\"\\n \\t\\\"github.com/goreleaser/goreleaser/context\\\"\\n+\\t\\\"github.com/goreleaser/goreleaser/internal/artifact\\\"\\n \\t\\\"github.com/goreleaser/goreleaser/internal/client\\\"\\n \\t\\\"github.com/goreleaser/goreleaser/pipeline\\\"\\n )\\n@@ -106,14 +103,14 @@ func doRun(ctx *context.Context, client client.Client) error {\\n \\t\\tartifact.And(\\n \\t\\t\\tartifact.ByGoos(\\\"darwin\\\"),\\n \\t\\t\\tartifact.ByGoarch(\\\"amd64\\\"),\\n-\\t\\t\\tartifact.ByGoarch(\\\"\\\"),\\n+\\t\\t\\tartifact.ByGoarm(\\\"\\\"),\\n \\t\\t\\tartifact.ByType(artifact.UploadableArchive),\\n \\t\\t),\\n \\t).List()\\n \\tif len(archives) == 0 {\\n \\t\\treturn ErrNoDarwin64Build\\n \\t}\\n-\\tif len(archives) > 0 {\\n+\\tif len(archives) > 1 {\\n \\t\\treturn ErrTooManyDarwin64Builds\\n \\t}\\n \\tvar path = filepath.Join(ctx.Config.Brew.Folder, ctx.Config.ProjectName+\\\".rb\\\")\\n@@ -145,8 +142,7 @@ func doBuildFormula(data templateData) (out bytes.Buffer, err error) {\\n }\\n \\n func dataFor(ctx *context.Context, client client.Client, artifact artifact.Artifact) (result templateData, err error) {\\n-\\tvar file = artifact.Path\\n-\\tsum, err := checksum.SHA256(file)\\n+\\tsum, err := checksum.SHA256(artifact.Path)\\n \\tif err != nil {\\n \\t\\treturn\\n \\t}\\n@@ -163,7 +159,7 @@ func dataFor(ctx *context.Context, client client.Client, artifact artifact.Artif\\n \\t\\tTag: ctx.Git.CurrentTag,\\n \\t\\tVersion: ctx.Version,\\n \\t\\tCaveats: ctx.Config.Brew.Caveats,\\n-\\t\\tFile: file,\\n+\\t\\tFile: artifact.Name,\\n \\t\\tSHA256: sum,\\n \\t\\tDependencies: ctx.Config.Brew.Dependencies,\\n \\t\\tConflicts: ctx.Config.Brew.Conflicts,\\ndiff --git a/pipeline/brew/brew_test.go b/pipeline/brew/brew_test.go\\nindex 7e513bf..9066935 100644\\n--- a/pipeline/brew/brew_test.go\\n+++ b/pipeline/brew/brew_test.go\\n@@ -9,6 +9,7 @@ import (\\n \\n \\t\\\"github.com/goreleaser/goreleaser/config\\\"\\n \\t\\\"github.com/goreleaser/goreleaser/context\\\"\\n+\\t\\\"github.com/goreleaser/goreleaser/internal/artifact\\\"\\n \\t\\\"github.com/goreleaser/goreleaser/internal/testlib\\\"\\n \\t\\\"github.com/stretchr/testify/assert\\\"\\n )\\n@@ -93,7 +94,8 @@ func TestRunPipe(t *testing.T) {\\n \\t\\tGit: context.GitInfo{\\n \\t\\t\\tCurrentTag: \\\"v1.0.1\\\",\\n \\t\\t},\\n-\\t\\tVersion: \\\"1.0.1\\\",\\n+\\t\\tVersion: \\\"1.0.1\\\",\\n+\\t\\tArtifacts: artifact.New(),\\n \\t\\tConfig: config.Project{\\n \\t\\t\\tDist: folder,\\n \\t\\t\\tProjectName: \\\"run-pipe\\\",\\n@@ -124,31 +126,53 @@ func TestRunPipe(t *testing.T) {\\n \\t\\tPublish: true,\\n \\t}\\n \\tvar path = filepath.Join(folder, \\\"bin.tar.gz\\\")\\n-\\tctx.AddBinary(\\\"darwinamd64\\\", \\\"bin\\\", \\\"bin\\\", path)\\n+\\tctx.Artifacts.Add(artifact.Artifact{\\n+\\t\\tName: \\\"bin.tar.gz\\\",\\n+\\t\\tPath: path,\\n+\\t\\tGoos: \\\"darwin\\\",\\n+\\t\\tGoarch: \\\"amd64\\\",\\n+\\t\\tType: artifact.UploadableArchive,\\n+\\t})\\n \\tclient := &DummyClient{}\\n \\tassert.Error(t, doRun(ctx, client))\\n \\tassert.False(t, client.CreatedFile)\\n \\n \\t_, err = os.Create(path)\\n \\tassert.NoError(t, err)\\n-\\tassert.NoError(t, doRun(ctx, client))\\n-\\tassert.True(t, client.CreatedFile)\\n \\n-\\tbts, err := ioutil.ReadFile(\\\"testdata/run_pipe.rb\\\")\\n-\\tassert.NoError(t, 
err)\\n-\\t// ioutil.WriteFile(\\\"testdata/run_pipe.rb\\\", []byte(client.Content), 0644)\\n+\\tt.Run(\\\"default git url\\\", func(tt *testing.T) {\\n+\\t\\tassert.NoError(tt, doRun(ctx, client))\\n+\\t\\tassert.True(tt, client.CreatedFile)\\n+\\n+\\t\\tbts, err := ioutil.ReadFile(\\\"testdata/run_pipe.rb\\\")\\n+\\t\\tassert.NoError(tt, err)\\n+\\t\\t// TODO: make writing this file toggleable somehow?\\n+\\t\\t// ioutil.WriteFile(\\\"testdata/run_pipe.rb\\\", []byte(client.Content), 0644)\\n+\\t\\tassert.Equal(tt, string(bts), client.Content)\\n+\\t})\\n \\n-\\tassert.Equal(t, string(bts), client.Content)\\n+\\tt.Run(\\\"github enterprise url\\\", func(tt *testing.T) {\\n+\\t\\tctx.Config.GitHubURLs.Download = \\\"http://github.example.org\\\"\\n+\\t\\tassert.NoError(tt, doRun(ctx, client))\\n+\\t\\tassert.True(tt, client.CreatedFile)\\n+\\n+\\t\\tbts, err := ioutil.ReadFile(\\\"testdata/run_pipe_enterprise.rb\\\")\\n+\\t\\tassert.NoError(tt, err)\\n+\\t\\t// TODO: make writing this file toggleable somehow?\\n+\\t\\t// ioutil.WriteFile(\\\"testdata/run_pipe_enterprise.rb\\\", []byte(client.Content), 0644)\\n+\\t\\tassert.Equal(tt, string(bts), client.Content)\\n+\\t})\\n }\\n \\n+// TODO: this test is irrelevant and can probavly be removed\\n func TestRunPipeFormatOverride(t *testing.T) {\\n \\tfolder, err := ioutil.TempDir(\\\"\\\", \\\"goreleasertest\\\")\\n \\tassert.NoError(t, err)\\n \\tvar path = filepath.Join(folder, \\\"bin.zip\\\")\\n \\t_, err = os.Create(path)\\n \\tassert.NoError(t, err)\\n-\\tvar ctx = &context.Context{\\n-\\t\\tConfig: config.Project{\\n+\\tvar ctx = context.New(\\n+\\t\\tconfig.Project{\\n \\t\\t\\tDist: folder,\\n \\t\\t\\tArchive: config.Archive{\\n \\t\\t\\t\\tFormat: \\\"tar.gz\\\",\\n@@ -166,9 +190,15 @@ func TestRunPipeFormatOverride(t *testing.T) {\\n \\t\\t\\t\\t},\\n \\t\\t\\t},\\n \\t\\t},\\n-\\t\\tPublish: true,\\n-\\t}\\n-\\tctx.AddBinary(\\\"darwinamd64\\\", \\\"bin\\\", \\\"bin\\\", path)\\n+\\t)\\n+\\tctx.Publish = true\\n+\\tctx.Artifacts.Add(artifact.Artifact{\\n+\\t\\tName: \\\"bin.zip\\\",\\n+\\t\\tPath: path,\\n+\\t\\tGoos: \\\"darwin\\\",\\n+\\t\\tGoarch: \\\"amd64\\\",\\n+\\t\\tType: artifact.UploadableArchive,\\n+\\t})\\n \\tclient := &DummyClient{}\\n \\tassert.NoError(t, doRun(ctx, client))\\n \\tassert.True(t, client.CreatedFile)\\n@@ -195,6 +225,40 @@ func TestRunPipeNoDarwin64Build(t *testing.T) {\\n \\tassert.False(t, client.CreatedFile)\\n }\\n \\n+func TestRunPipeMultipleDarwin64Build(t *testing.T) {\\n+\\tvar ctx = context.New(\\n+\\t\\tconfig.Project{\\n+\\t\\t\\tArchive: config.Archive{\\n+\\t\\t\\t\\tFormat: \\\"tar.gz\\\",\\n+\\t\\t\\t},\\n+\\t\\t\\tBrew: config.Homebrew{\\n+\\t\\t\\t\\tGitHub: config.Repo{\\n+\\t\\t\\t\\t\\tOwner: \\\"test\\\",\\n+\\t\\t\\t\\t\\tName: \\\"test\\\",\\n+\\t\\t\\t\\t},\\n+\\t\\t\\t},\\n+\\t\\t},\\n+\\t)\\n+\\tctx.Publish = true\\n+\\tctx.Artifacts.Add(artifact.Artifact{\\n+\\t\\tName: \\\"bin1\\\",\\n+\\t\\tPath: \\\"doesnt mather\\\",\\n+\\t\\tGoos: \\\"darwin\\\",\\n+\\t\\tGoarch: \\\"amd64\\\",\\n+\\t\\tType: artifact.UploadableArchive,\\n+\\t})\\n+\\tctx.Artifacts.Add(artifact.Artifact{\\n+\\t\\tName: \\\"bin2\\\",\\n+\\t\\tPath: \\\"doesnt mather\\\",\\n+\\t\\tGoos: \\\"darwin\\\",\\n+\\t\\tGoarch: \\\"amd64\\\",\\n+\\t\\tType: artifact.UploadableArchive,\\n+\\t})\\n+\\tclient := &DummyClient{}\\n+\\tassert.Equal(t, ErrTooManyDarwin64Builds, doRun(ctx, client))\\n+\\tassert.False(t, client.CreatedFile)\\n+}\\n+\\n func TestRunPipeBrewNotSetup(t *testing.T) {\\n \\tvar ctx = 
&context.Context{\\n \\t\\tConfig: config.Project{},\\n@@ -206,9 +270,8 @@ func TestRunPipeBrewNotSetup(t *testing.T) {\\n }\\n \\n func TestRunPipeBinaryRelease(t *testing.T) {\\n-\\tvar ctx = &context.Context{\\n-\\t\\tPublish: true,\\n-\\t\\tConfig: config.Project{\\n+\\tvar ctx = context.New(\\n+\\t\\tconfig.Project{\\n \\t\\t\\tArchive: config.Archive{\\n \\t\\t\\t\\tFormat: \\\"binary\\\",\\n \\t\\t\\t},\\n@@ -219,8 +282,15 @@ func TestRunPipeBinaryRelease(t *testing.T) {\\n \\t\\t\\t\\t},\\n \\t\\t\\t},\\n \\t\\t},\\n-\\t}\\n-\\tctx.AddBinary(\\\"darwinamd64\\\", \\\"foo\\\", \\\"bar\\\", \\\"baz\\\")\\n+\\t)\\n+\\tctx.Publish = true\\n+\\tctx.Artifacts.Add(artifact.Artifact{\\n+\\t\\tName: \\\"bin\\\",\\n+\\t\\tPath: \\\"doesnt mather\\\",\\n+\\t\\tGoos: \\\"darwin\\\",\\n+\\t\\tGoarch: \\\"amd64\\\",\\n+\\t\\tType: artifact.Binary,\\n+\\t})\\n \\tclient := &DummyClient{}\\n \\ttestlib.AssertSkipped(t, doRun(ctx, client))\\n \\tassert.False(t, client.CreatedFile)\\ndiff --git a/pipeline/brew/doc.go b/pipeline/brew/doc.go\\nnew file mode 100644\\nindex 0000000..2cddc12\\n--- /dev/null\\n+++ b/pipeline/brew/doc.go\\n@@ -0,0 +1,3 @@\\n+// Package brew implements the Pipe, providing formula generation and\\n+// uploading it to a configured repo.\\n+package brew\\ndiff --git a/pipeline/brew/testdata/run_pipe_enterprise.rb b/pipeline/brew/testdata/run_pipe_enterprise.rb\\nnew file mode 100644\\nindex 0000000..4b24ce0\\n--- /dev/null\\n+++ b/pipeline/brew/testdata/run_pipe_enterprise.rb\\n@@ -0,0 +1,33 @@\\n+class RunPipe < Formula\\n+ desc \\\"A run pipe test formula\\\"\\n+ homepage \\\"https://github.com/goreleaser\\\"\\n+ url \\\"http://github.example.org/test/test/releases/download/v1.0.1/bin.tar.gz\\\"\\n+ version \\\"1.0.1\\\"\\n+ sha256 \\\"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\\\"\\n+ \\n+ depends_on \\\"zsh\\\"\\n+ depends_on \\\"bash\\\"\\n+ \\n+ conflicts_with \\\"gtk+\\\"\\n+ conflicts_with \\\"qt\\\"\\n+\\n+ def install\\n+ bin.install \\\"foo\\\"\\n+ end\\n+\\n+ def caveats\\n+ \\\"don't do this\\\"\\n+ end\\n+\\n+ plist_options :startup => false\\n+\\n+ def plist; <<-EOS.undent\\n+ whatever\\n+ EOS\\n+ end\\n+\\n+ test do\\n+ system \\\"true\\\"\\n+ system \\\"#{bin}/foo -h\\\"\\n+ end\\n+end\\n\", \"diff --git a/docs/getting-started/getting-started.md b/docs/getting-started/getting-started.md\\nindex dc6db37..3ef9d0a 100644\\n--- a/docs/getting-started/getting-started.md\\n+++ b/docs/getting-started/getting-started.md\\n@@ -13,7 +13,7 @@ npm install -g @angular/cli\\n **Using `npx`**\\n \\n ```bash\\n-npx create-nx-workspace myworkspace\\n+npx --ignore-existing create-nx-workspace myworkspace\\n ```\\n \\n **Using `npm init`**\\ndiff --git a/docs/guides/react-and-angular.md b/docs/guides/react-and-angular.md\\nindex c1929a2..a5651ff 100644\\n--- a/docs/guides/react-and-angular.md\\n+++ b/docs/guides/react-and-angular.md\\n@@ -11,7 +11,7 @@ To show how Nx does it, let's build two applications (one in Angular, and one in\\n Let's start by creating a new Nx workspace. 
The easiest way to do this is to use npx.\\n \\n ```bash\\n-npx create-nx-workspace happynrwl --preset=empty\\n+npx --ignore-existing create-nx-workspace happynrwl --preset=empty\\n ```\\n \\n ## Creating an Angular Application\\ndiff --git a/docs/guides/react.md b/docs/guides/react.md\\nindex e1647fd..eac848e 100644\\n--- a/docs/guides/react.md\\n+++ b/docs/guides/react.md\\n@@ -16,13 +16,13 @@ Nx has first class support for React: you can create React applications and libr\\n Create a new Nx workspace. The easiest way to do it is to use npx.\\n \\n ```bash\\n-npx create-nx-workspace happynrwl --preset=empty\\n+npx --ignore-existing create-nx-workspace happynrwl --preset=empty\\n ```\\n \\n You can also create a workspace with a React application in place by running:\\n \\n ```bash\\n-npx create-nx-workspace happynrwl --preset=react\\n+npx --ignore-existing create-nx-workspace happynrwl --preset=react\\n ```\\n \\n ## Generating a React Application\\ndiff --git a/docs/tutorial/01-create-application.md b/docs/tutorial/01-create-application.md\\nindex ea87ecf..967a56e 100644\\n--- a/docs/tutorial/01-create-application.md\\n+++ b/docs/tutorial/01-create-application.md\\n@@ -7,7 +7,7 @@ In this tutorial you will use Nx to build a full-stack application out of common\\n **Start by creating a new workspace.**\\n \\n ```bash\\n-npx create-nx-workspace myorg\\n+npx --ignore-existing create-nx-workspace myorg\\n ```\\n \\n When asked about 'preset', select `empty`.\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"54cb6d4643b4a072ff997592a7fa14a69a6c068d\", \"3e0c4cbf91fe5efc9b93baba93e4df93ef4ab5cd\", \"ab1e60a97c6d5c688dacbd23bca40cb8f20c4ac3\", \"f433bcb59c36571e22d4e86c612e0a6a52f73c09\", \"fc9af4d0b93d69be4e201ffb18da04324e8a4a87\"]"},"types":{"kind":"string","value":"[\"ci\", \"refactor\", \"fix\", \"feat\", \"docs\"]"}}},{"rowIdx":1369,"cells":{"commit_message":{"kind":"string","value":"add unit test for query API,switch QA to new testbench-1.x-prod\n\nIn order to use the new Testbench that is compatible with Zeebe 1.x\nversions, this switches the client id and secrets used by the QA stage.,refactor generate_completion,Publish crates,added components pages to typedoc output"},"diff":{"kind":"string","value":"[\"diff --git a/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java b/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java\\nindex 2d2d084..38261ad 100644\\n--- a/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java\\n+++ b/gateway/src/test/java/io/camunda/zeebe/gateway/api/util/StubbedBrokerClient.java\\n@@ -25,6 +25,7 @@ import java.util.HashMap;\\n import java.util.List;\\n import java.util.Map;\\n import java.util.concurrent.CompletableFuture;\\n+import java.util.concurrent.TimeUnit;\\n import java.util.function.Consumer;\\n \\n public final class StubbedBrokerClient implements BrokerClient {\\n@@ -67,7 +68,15 @@ public final class StubbedBrokerClient implements BrokerClient {\\n @Override\\n public <T> CompletableFuture<BrokerResponse<T>> sendRequestWithRetry(\\n final BrokerRequest<T> request, final Duration requestTimeout) {\\n- throw new UnsupportedOperationException(\\\"not implemented\\\");\\n+ final CompletableFuture<BrokerResponse<T>> result = new CompletableFuture<>();\\n+\\n+ sendRequestWithRetry(\\n+ request,\\n+ (key, response) ->\\n+ result.complete(new BrokerResponse<>(response, Protocol.decodePartitionId(key), key)),\\n+ result::completeExceptionally);\\n+\\n+ return 
result.orTimeout(requestTimeout.toNanos(), TimeUnit.NANOSECONDS);\\n }\\n \\n @Override\\ndiff --git a/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryApiTest.java b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryApiTest.java\\nnew file mode 100644\\nindex 0000000..ec9ec80\\n--- /dev/null\\n+++ b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryApiTest.java\\n@@ -0,0 +1,91 @@\\n+/*\\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\\n+ * one or more contributor license agreements. See the NOTICE file distributed\\n+ * with this work for additional information regarding copyright ownership.\\n+ * Licensed under the Zeebe Community License 1.1. You may not use this file\\n+ * except in compliance with the Zeebe Community License 1.1.\\n+ */\\n+package io.camunda.zeebe.gateway.query;\\n+\\n+import static org.assertj.core.api.Assertions.assertThat;\\n+\\n+import io.camunda.zeebe.gateway.api.util.GatewayTest;\\n+import io.camunda.zeebe.gateway.cmd.BrokerErrorException;\\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerError;\\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerErrorResponse;\\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerResponse;\\n+import io.camunda.zeebe.gateway.query.impl.QueryApiImpl;\\n+import io.camunda.zeebe.protocol.Protocol;\\n+import io.camunda.zeebe.protocol.record.ErrorCode;\\n+import java.time.Duration;\\n+import java.util.concurrent.CompletionStage;\\n+import java.util.concurrent.ExecutionException;\\n+import org.junit.Test;\\n+import org.junit.runner.RunWith;\\n+import org.junit.runners.Parameterized;\\n+import org.junit.runners.Parameterized.Parameter;\\n+import org.junit.runners.Parameterized.Parameters;\\n+\\n+@RunWith(Parameterized.class)\\n+public final class QueryApiTest extends GatewayTest {\\n+ @Parameter(0)\\n+ public String name;\\n+\\n+ @Parameter(1)\\n+ public Querier querier;\\n+\\n+ @Parameters(name = \\\"{index}: {0}\\\")\\n+ public static Object[][] queries() {\\n+ return new Object[][] {\\n+ new Object[] {\\\"getBpmnProcessIdForProcess\\\", (Querier) QueryApi::getBpmnProcessIdFromProcess},\\n+ new Object[] {\\n+ \\\"getBpmnProcessIdForProcessInstance\\\",\\n+ (Querier) QueryApi::getBpmnProcessIdFromProcessInstance\\n+ },\\n+ new Object[] {\\\"getBpmnProcessIdForProcessJob\\\", (Querier) QueryApi::getBpmnProcessIdFromJob},\\n+ };\\n+ }\\n+\\n+ @Test\\n+ public void shouldGetBpmnProcessId() {\\n+ // given\\n+ final var key = Protocol.encodePartitionId(1, 1);\\n+ final var api = new QueryApiImpl(brokerClient);\\n+ final var timeout = Duration.ofSeconds(5);\\n+ final var stub = new QueryStub(new BrokerResponse<>(\\\"myProcess\\\", 1, 1));\\n+ stub.registerWith(brokerClient);\\n+\\n+ // when\\n+ final var result = querier.query(api, key, timeout);\\n+\\n+ // then\\n+ assertThat(result).succeedsWithin(timeout).isEqualTo(\\\"myProcess\\\");\\n+ }\\n+\\n+ @Test\\n+ public void shouldCompleteExceptionallyOnError() {\\n+ // given\\n+ final var key = Protocol.encodePartitionId(1, 1);\\n+ final var api = new QueryApiImpl(brokerClient);\\n+ final var timeout = Duration.ofSeconds(5);\\n+ final var stub =\\n+ new QueryStub(\\n+ new BrokerErrorResponse<>(\\n+ new BrokerError(ErrorCode.PARTITION_LEADER_MISMATCH, \\\"Leader mismatch\\\")));\\n+ stub.registerWith(brokerClient);\\n+\\n+ // when\\n+ final var result = querier.query(api, key, timeout);\\n+\\n+ // then\\n+ assertThat(result)\\n+ .failsWithin(timeout)\\n+ 
.withThrowableOfType(ExecutionException.class)\\n+ .havingRootCause()\\n+ .isInstanceOf(BrokerErrorException.class);\\n+ }\\n+\\n+ private interface Querier {\\n+ CompletionStage<String> query(final QueryApi api, final long key, final Duration timeout);\\n+ }\\n+}\\ndiff --git a/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryStub.java b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryStub.java\\nnew file mode 100644\\nindex 0000000..2f8334e\\n--- /dev/null\\n+++ b/gateway/src/test/java/io/camunda/zeebe/gateway/query/QueryStub.java\\n@@ -0,0 +1,31 @@\\n+/*\\n+ * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under\\n+ * one or more contributor license agreements. See the NOTICE file distributed\\n+ * with this work for additional information regarding copyright ownership.\\n+ * Licensed under the Zeebe Community License 1.1. You may not use this file\\n+ * except in compliance with the Zeebe Community License 1.1.\\n+ */\\n+package io.camunda.zeebe.gateway.query;\\n+\\n+import io.camunda.zeebe.gateway.api.util.StubbedBrokerClient;\\n+import io.camunda.zeebe.gateway.api.util.StubbedBrokerClient.RequestStub;\\n+import io.camunda.zeebe.gateway.impl.broker.response.BrokerResponse;\\n+import io.camunda.zeebe.gateway.query.impl.BrokerExecuteQuery;\\n+\\n+final class QueryStub implements RequestStub<BrokerExecuteQuery, BrokerResponse<String>> {\\n+ private final BrokerResponse<String> response;\\n+\\n+ public QueryStub(final BrokerResponse<String> response) {\\n+ this.response = response;\\n+ }\\n+\\n+ @Override\\n+ public void registerWith(final StubbedBrokerClient gateway) {\\n+ gateway.registerHandler(BrokerExecuteQuery.class, this);\\n+ }\\n+\\n+ @Override\\n+ public BrokerResponse<String> handle(final BrokerExecuteQuery request) throws Exception {\\n+ return response;\\n+ }\\n+}\\n\", \"diff --git a/Jenkinsfile b/Jenkinsfile\\nindex 176ab58..bead402 100644\\n--- a/Jenkinsfile\\n+++ b/Jenkinsfile\\n@@ -326,7 +326,7 @@ pipeline {\\n TAG = \\\"${env.VERSION}-${env.GIT_COMMIT}\\\"\\n DOCKER_GCR = credentials(\\\"zeebe-gcr-serviceaccount-json\\\")\\n ZEEBE_AUTHORIZATION_SERVER_URL = 'https://login.cloud.ultrawombat.com/oauth/token'\\n- ZEEBE_CLIENT_ID = 'W5a4JUc3I1NIetNnodo3YTvdsRIFb12w'\\n+ ZEEBE_CLIENT_ID = 'ELL8eP0qDkl6dxXVps0t51x2VkCkWf~p'\\n QA_RUN_VARIABLES = \\\"{\\\\\\\"zeebeImage\\\\\\\": \\\\\\\"${env.IMAGE}:${env.TAG}\\\\\\\", \\\\\\\"generationTemplate\\\\\\\": \\\\\\\"${params.GENERATION_TEMPLATE}\\\\\\\", \\\" +\\n \\\"\\\\\\\"channel\\\\\\\": \\\\\\\"Internal Dev\\\\\\\", \\\\\\\"branch\\\\\\\": \\\\\\\"${env.BRANCH_NAME}\\\\\\\", \\\\\\\"build\\\\\\\": \\\\\\\"${currentBuild.absoluteUrl}\\\\\\\", \\\" +\\n \\\"\\\\\\\"businessKey\\\\\\\": \\\\\\\"${currentBuild.absoluteUrl}\\\\\\\", \\\\\\\"processId\\\\\\\": \\\\\\\"qa-protocol\\\\\\\"}\\\"\\n@@ -341,7 +341,7 @@ pipeline {\\n withVault(\\n [vaultSecrets:\\n [\\n- [path : 'secret/common/ci-zeebe/testbench-secrets-int',\\n+ [path : 'secret/common/ci-zeebe/testbench-secrets-1.x-prod',\\n secretValues:\\n [\\n [envVar: 'ZEEBE_CLIENT_SECRET', vaultKey: 'clientSecret'],\\n\", \"diff --git a/src/lib.rs b/src/lib.rs\\nindex dfd8014..15850f7 100644\\n--- a/src/lib.rs\\n+++ b/src/lib.rs\\n@@ -1,11 +1,106 @@\\n //! 
Generates [Nushell](https://github.com/nushell/nushell) completions for [`clap`](https://github.com/clap-rs/clap) based CLIs\\n \\n-use clap::Command;\\n+use clap::{Arg, Command};\\n use clap_complete::Generator;\\n \\n /// Generate Nushell complete file\\n pub struct Nushell;\\n \\n+enum Argument {\\n+ Short(char),\\n+ Long(String),\\n+ ShortAndLong(char, String),\\n+ Positional(String, bool),\\n+}\\n+\\n+struct ArgumentLine {\\n+ arg: Argument,\\n+ takes_values: bool,\\n+ help: Option<String>,\\n+}\\n+\\n+impl From<&Arg> for ArgumentLine {\\n+ fn from(arg: &Arg) -> Self {\\n+ let takes_values = arg\\n+ .get_num_args()\\n+ .map(|v| v.takes_values())\\n+ .unwrap_or(false);\\n+\\n+ let help = arg.get_help().map(|s| s.to_string());\\n+\\n+ if arg.is_positional() {\\n+ let id = arg.get_id().to_string();\\n+ let required = arg.is_required_set();\\n+ let arg = Argument::Positional(id, required);\\n+\\n+ return Self {\\n+ arg,\\n+ takes_values,\\n+ help,\\n+ };\\n+ }\\n+\\n+ let short = arg.get_short();\\n+ let long = arg.get_long();\\n+\\n+ match short {\\n+ Some(short) => match long {\\n+ Some(long) => Self {\\n+ arg: Argument::ShortAndLong(short, long.into()),\\n+ takes_values,\\n+ help,\\n+ },\\n+ None => Self {\\n+ arg: Argument::Short(short),\\n+ takes_values,\\n+ help,\\n+ },\\n+ },\\n+ None => match long {\\n+ Some(long) => Self {\\n+ arg: Argument::Long(long.into()),\\n+ takes_values,\\n+ help,\\n+ },\\n+ None => unreachable!(\\\"No short or long option found\\\"),\\n+ },\\n+ }\\n+ }\\n+}\\n+\\n+impl ToString for ArgumentLine {\\n+ fn to_string(&self) -> String {\\n+ let mut s = String::new();\\n+\\n+ match &self.arg {\\n+ Argument::Short(short) => s.push_str(format!(\\\" -{}\\\", short).as_str()),\\n+ Argument::Long(long) => s.push_str(format!(\\\" --{}\\\", long).as_str()),\\n+ Argument::ShortAndLong(short, long) => {\\n+ s.push_str(format!(\\\" --{}(-{})\\\", long, short).as_str())\\n+ }\\n+ Argument::Positional(positional, required) => {\\n+ s.push_str(format!(\\\" {}\\\", positional).as_str());\\n+\\n+ if !*required {\\n+ s.push('?');\\n+ }\\n+ }\\n+ }\\n+\\n+ if self.takes_values {\\n+ s.push_str(\\\": string\\\");\\n+ }\\n+\\n+ if let Some(help) = &self.help {\\n+ s.push_str(format!(\\\"\\\\t# {}\\\", help).as_str());\\n+ }\\n+\\n+ s.push('\\\\n');\\n+\\n+ s\\n+ }\\n+}\\n+\\n impl Generator for Nushell {\\n fn file_name(&self, name: &str) -> String {\\n format!(\\\"{}.nu\\\", name)\\n@@ -37,51 +132,18 @@ fn generate_completion(completions: &mut String, cmd: &Command, is_subcommand: b\\n \\n let bin_name = cmd.get_bin_name().expect(\\\"Failed to get bin name\\\");\\n \\n- if is_subcommand {\\n- completions.push_str(format!(\\\" export extern \\\\\\\"{}\\\\\\\" [\\\\n\\\", bin_name).as_str());\\n+ let name = if is_subcommand {\\n+ format!(r#\\\"\\\"{}\\\"\\\"#, bin_name)\\n } else {\\n- completions.push_str(format!(\\\" export extern {} [\\\\n\\\", bin_name).as_str());\\n- }\\n+ bin_name.into()\\n+ };\\n \\n- let mut s = String::new();\\n- for arg in cmd.get_arguments() {\\n- if arg.is_positional() {\\n- s.push_str(format!(\\\" {}\\\", arg.get_id()).as_str());\\n- if !arg.is_required_set() {\\n- s.push('?');\\n- }\\n- }\\n-\\n- let long = arg.get_long();\\n- if let Some(opt) = long {\\n- s.push_str(format!(\\\" --{}\\\", opt).as_str());\\n- }\\n+ completions.push_str(format!(\\\" export extern {} [\\\\n\\\", name).as_str());\\n \\n- let short = arg.get_short();\\n- if let Some(opt) = short {\\n- if long.is_some() {\\n- s.push_str(format!(\\\"(-{})\\\", opt).as_str());\\n- } else {\\n- 
s.push_str(format!(\\\" -{}\\\", opt).as_str());\\n- }\\n- }\\n-\\n- if let Some(v) = arg.get_num_args() {\\n- if v.takes_values() {\\n- // TODO: add more types?\\n- // TODO: add possible values?\\n- s.push_str(\\\": string\\\");\\n- }\\n- }\\n-\\n- if let Some(msg) = arg.get_help() {\\n- if arg.is_positional() || long.is_some() || short.is_some() {\\n- s.push_str(format!(\\\"\\\\t# {}\\\", msg).as_str());\\n- }\\n- }\\n-\\n- s.push('\\\\n');\\n- }\\n+ let s: String = cmd\\n+ .get_arguments()\\n+ .map(|arg| ArgumentLine::from(arg).to_string())\\n+ .collect();\\n \\n completions.push_str(&s);\\n completions.push_str(\\\" ]\\\\n\\\\n\\\");\\n\", \"diff --git a/CHANGELOG.md b/CHANGELOG.md\\nindex 7b98b44..f17ad6f 100644\\n--- a/CHANGELOG.md\\n+++ b/CHANGELOG.md\\n@@ -7,6 +7,9 @@\\n \\n - **(css/parser)** Fix parsing of at rules (#3328) ([506a310](https://github.com/swc-project/swc/commit/506a31078aaebf50129658f096bbd5929995205f))\\n \\n+\\n+- **(es/compat)** Fix regression of `destructuring` (#3326) ([6d1ad36](https://github.com/swc-project/swc/commit/6d1ad368aca53ee64a63ae565cd015909f2f4458))\\n+\\n ### Performance\\n \\n \\ndiff --git a/Cargo.lock b/Cargo.lock\\nindex 3c6598b..4baa252 100644\\n--- a/Cargo.lock\\n+++ b/Cargo.lock\\n@@ -2652,7 +2652,7 @@ dependencies = [\\n \\n [[package]]\\n name = \\\"swc\\\"\\n-version = \\\"0.116.15\\\"\\n+version = \\\"0.116.16\\\"\\n dependencies = [\\n \\\"ahash\\\",\\n \\\"anyhow\\\",\\n@@ -3097,7 +3097,7 @@ dependencies = [\\n \\n [[package]]\\n name = \\\"swc_ecma_transforms\\\"\\n-version = \\\"0.113.3\\\"\\n+version = \\\"0.113.4\\\"\\n dependencies = [\\n \\\"pretty_assertions 0.7.2\\\",\\n \\\"sourcemap\\\",\\n@@ -3157,7 +3157,7 @@ dependencies = [\\n \\n [[package]]\\n name = \\\"swc_ecma_transforms_compat\\\"\\n-version = \\\"0.68.2\\\"\\n+version = \\\"0.68.3\\\"\\n dependencies = [\\n \\\"ahash\\\",\\n \\\"arrayvec 0.7.2\\\",\\n@@ -3366,7 +3366,7 @@ dependencies = [\\n \\n [[package]]\\n name = \\\"swc_ecmascript\\\"\\n-version = \\\"0.110.14\\\"\\n+version = \\\"0.110.15\\\"\\n dependencies = [\\n \\\"swc_ecma_ast\\\",\\n \\\"swc_ecma_codegen\\\",\\ndiff --git a/crates/swc/Cargo.toml b/crates/swc/Cargo.toml\\nindex 756cfc8..2f02d22 100644\\n--- a/crates/swc/Cargo.toml\\n+++ b/crates/swc/Cargo.toml\\n@@ -9,7 +9,7 @@ include = [\\\"Cargo.toml\\\", \\\"src/**/*.rs\\\"]\\n license = \\\"Apache-2.0\\\"\\n name = \\\"swc\\\"\\n repository = \\\"https://github.com/swc-project/swc.git\\\"\\n-version = \\\"0.116.15\\\"\\n+version = \\\"0.116.16\\\"\\n \\n [lib]\\n name = \\\"swc\\\"\\n@@ -55,7 +55,7 @@ swc_ecma_loader = {version = \\\"0.27.0\\\", path = \\\"../swc_ecma_loader\\\", features = [\\n swc_ecma_minifier = {version = \\\"0.70.9\\\", path = \\\"../swc_ecma_minifier\\\"}\\n swc_ecma_parser = {version = \\\"0.87.0\\\", path = \\\"../swc_ecma_parser\\\"}\\n swc_ecma_preset_env = {version = \\\"0.86.1\\\", path = \\\"../swc_ecma_preset_env\\\"}\\n-swc_ecma_transforms = {version = \\\"0.113.3\\\", path = \\\"../swc_ecma_transforms\\\", features = [\\n+swc_ecma_transforms = {version = \\\"0.113.4\\\", path = \\\"../swc_ecma_transforms\\\", features = [\\n \\\"compat\\\",\\n \\\"module\\\",\\n \\\"optimization\\\",\\n@@ -64,11 +64,11 @@ swc_ecma_transforms = {version = \\\"0.113.3\\\", path = \\\"../swc_ecma_transforms\\\", fea\\n \\\"typescript\\\",\\n ]}\\n swc_ecma_transforms_base = {version = \\\"0.57.1\\\", path = \\\"../swc_ecma_transforms_base\\\"}\\n-swc_ecma_transforms_compat = {version = \\\"0.68.2\\\", path = 
\\\"../swc_ecma_transforms_compat\\\"}\\n+swc_ecma_transforms_compat = {version = \\\"0.68.3\\\", path = \\\"../swc_ecma_transforms_compat\\\"}\\n swc_ecma_transforms_optimization = {version = \\\"0.83.0\\\", path = \\\"../swc_ecma_transforms_optimization\\\"}\\n swc_ecma_utils = {version = \\\"0.64.0\\\", path = \\\"../swc_ecma_utils\\\"}\\n swc_ecma_visit = {version = \\\"0.51.1\\\", path = \\\"../swc_ecma_visit\\\"}\\n-swc_ecmascript = {version = \\\"0.110.14\\\", path = \\\"../swc_ecmascript\\\"}\\n+swc_ecmascript = {version = \\\"0.110.15\\\", path = \\\"../swc_ecmascript\\\"}\\n swc_node_comments = {version = \\\"0.4.0\\\", path = \\\"../swc_node_comments\\\"}\\n swc_plugin_runner = {version = \\\"0.30.0\\\", path = \\\"../swc_plugin_runner\\\", optional = true}\\n swc_visit = {version = \\\"0.3.0\\\", path = \\\"../swc_visit\\\"}\\ndiff --git a/crates/swc_ecma_transforms/Cargo.toml b/crates/swc_ecma_transforms/Cargo.toml\\nindex 1604f4e..a0aafae 100644\\n--- a/crates/swc_ecma_transforms/Cargo.toml\\n+++ b/crates/swc_ecma_transforms/Cargo.toml\\n@@ -6,7 +6,7 @@ edition = \\\"2021\\\"\\n license = \\\"Apache-2.0\\\"\\n name = \\\"swc_ecma_transforms\\\"\\n repository = \\\"https://github.com/swc-project/swc.git\\\"\\n-version = \\\"0.113.3\\\"\\n+version = \\\"0.113.4\\\"\\n \\n [package.metadata.docs.rs]\\n all-features = true\\n@@ -28,7 +28,7 @@ swc_common = {version = \\\"0.17.0\\\", path = \\\"../swc_common\\\"}\\n swc_ecma_ast = {version = \\\"0.65.0\\\", path = \\\"../swc_ecma_ast\\\"}\\n swc_ecma_parser = {version = \\\"0.87.0\\\", path = \\\"../swc_ecma_parser\\\"}\\n swc_ecma_transforms_base = {version = \\\"0.57.1\\\", path = \\\"../swc_ecma_transforms_base\\\"}\\n-swc_ecma_transforms_compat = {version = \\\"0.68.2\\\", path = \\\"../swc_ecma_transforms_compat\\\", optional = true}\\n+swc_ecma_transforms_compat = {version = \\\"0.68.3\\\", path = \\\"../swc_ecma_transforms_compat\\\", optional = true}\\n swc_ecma_transforms_module = {version = \\\"0.74.0\\\", path = \\\"../swc_ecma_transforms_module\\\", optional = true}\\n swc_ecma_transforms_optimization = {version = \\\"0.83.0\\\", path = \\\"../swc_ecma_transforms_optimization\\\", optional = true}\\n swc_ecma_transforms_proposal = {version = \\\"0.74.0\\\", path = \\\"../swc_ecma_transforms_proposal\\\", optional = true}\\ndiff --git a/crates/swc_ecma_transforms_compat/Cargo.toml b/crates/swc_ecma_transforms_compat/Cargo.toml\\nindex 0ea6609..58374e3 100644\\n--- a/crates/swc_ecma_transforms_compat/Cargo.toml\\n+++ b/crates/swc_ecma_transforms_compat/Cargo.toml\\n@@ -6,7 +6,7 @@ edition = \\\"2021\\\"\\n license = \\\"Apache-2.0\\\"\\n name = \\\"swc_ecma_transforms_compat\\\"\\n repository = \\\"https://github.com/swc-project/swc.git\\\"\\n-version = \\\"0.68.2\\\"\\n+version = \\\"0.68.3\\\"\\n # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\\n \\n [features]\\ndiff --git a/crates/swc_ecmascript/Cargo.toml b/crates/swc_ecmascript/Cargo.toml\\nindex 63680a0..775208a 100644\\n--- a/crates/swc_ecmascript/Cargo.toml\\n+++ b/crates/swc_ecmascript/Cargo.toml\\n@@ -6,7 +6,7 @@ edition = \\\"2021\\\"\\n license = \\\"Apache-2.0\\\"\\n name = \\\"swc_ecmascript\\\"\\n repository = \\\"https://github.com/swc-project/swc.git\\\"\\n-version = \\\"0.110.14\\\"\\n+version = \\\"0.110.15\\\"\\n \\n [package.metadata.docs.rs]\\n all-features = true\\n@@ -39,7 +39,7 @@ swc_ecma_dep_graph = {version = \\\"0.58.0\\\", path = \\\"../swc_ecma_dep_graph\\\", option\\n swc_ecma_minifier = 
{version = \\\"0.70.9\\\", path = \\\"../swc_ecma_minifier\\\", optional = true}\\n swc_ecma_parser = {version = \\\"0.87.0\\\", path = \\\"../swc_ecma_parser\\\", optional = true, default-features = false}\\n swc_ecma_preset_env = {version = \\\"0.86.1\\\", path = \\\"../swc_ecma_preset_env\\\", optional = true}\\n-swc_ecma_transforms = {version = \\\"0.113.3\\\", path = \\\"../swc_ecma_transforms\\\", optional = true}\\n+swc_ecma_transforms = {version = \\\"0.113.4\\\", path = \\\"../swc_ecma_transforms\\\", optional = true}\\n swc_ecma_utils = {version = \\\"0.64.0\\\", path = \\\"../swc_ecma_utils\\\", optional = true}\\n swc_ecma_visit = {version = \\\"0.51.1\\\", path = \\\"../swc_ecma_visit\\\", optional = true}\\n \\n\", \"diff --git a/core/main/tsconfig.json b/core/main/tsconfig.json\\nindex c4474a7..7916bc5 100644\\n--- a/core/main/tsconfig.json\\n+++ b/core/main/tsconfig.json\\n@@ -96,11 +96,35 @@\\n \\\"particles\\\": {\\n \\\"groups\\\": [\\n {\\n- \\\"title\\\": \\\"Documentation\\\",\\n+ \\\"title\\\": \\\"Components\\\",\\n \\\"pages\\\": [\\n {\\n- \\\"title\\\": \\\"My Page\\\",\\n- \\\"source\\\": \\\"./markdown/pages/index.md\\\"\\n+ \\\"title\\\": \\\"Angular\\\",\\n+ \\\"source\\\": \\\"../../components/angular/README.md\\\"\\n+ },\\n+ {\\n+ \\\"title\\\": \\\"React\\\",\\n+ \\\"source\\\": \\\"../../components/react/README.md\\\"\\n+ },\\n+ {\\n+ \\\"title\\\": \\\"Vue\\\",\\n+ \\\"source\\\": \\\"../../components/vue/README.md\\\"\\n+ },\\n+ {\\n+ \\\"title\\\": \\\"Svelte\\\",\\n+ \\\"source\\\": \\\"../../components/svelte/README.md\\\"\\n+ },\\n+ {\\n+ \\\"title\\\": \\\"jQuery\\\",\\n+ \\\"source\\\": \\\"../../components/jquery/README.md\\\"\\n+ },\\n+ {\\n+ \\\"title\\\": \\\"Preact\\\",\\n+ \\\"source\\\": \\\"../../components/preact/README.md\\\"\\n+ },\\n+ {\\n+ \\\"title\\\": \\\"Inferno\\\",\\n+ \\\"source\\\": \\\"../../components/inferno/README.md\\\"\\n }\\n ]\\n }\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"bed86aeae8dad2dd6371635cd24bf8ef3db80361\", \"c81a0c2999454c859b4bf4da5779712960d239be\", \"f1bc5a554af4e617c7d7508f7f16f8fd25c78c91\", \"af53b9487f74ff28438928903fb1f2db93fe4fa8\", \"fca2c198c6486c4d586b1af1832be46f19667235\"]"},"types":{"kind":"string","value":"[\"test\", \"ci\", \"refactor\", \"build\", \"docs\"]"}}},{"rowIdx":1370,"cells":{"commit_message":{"kind":"string","value":"e2e,fix golden tests for aws_vpn_connection,updated to lerna v6,serialize access to StreamObserver,add ability to get all encoded values"},"diff":{"kind":"string","value":"[\"diff --git a/.github/workflows/kibbeh-e2e_tests.yaml b/.github/workflows/kibbeh-e2e_tests.yaml\\nindex 52bf3ed..74fe785 100644\\n--- a/.github/workflows/kibbeh-e2e_tests.yaml\\n+++ b/.github/workflows/kibbeh-e2e_tests.yaml\\n@@ -4,7 +4,7 @@ on:\\n branches:\\n - staging\\n paths:\\n- - '.github/workflows/end-to-end-tests.yaml'\\n+ - '.github/workflows/kibbeh-e2e_tests.yaml'\\n - 'kousa/lib/**'\\n - 'kibbeh/src/**'\\n pull_request:\\n\", \"diff --git a/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf b/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\\nindex d895677..cf10e3f 100644\\n--- a/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\\n+++ b/internal/providers/terraform/aws/testdata/vpn_connection_test/vpn_connection_test.tf\\n@@ -12,6 +12,7 @@ provider \\\"aws\\\" {\\n resource \\\"aws_vpn_connection\\\" \\\"vpn_connection\\\" 
{\\n customer_gateway_id = \\\"dummy-customer-gateway-id\\\"\\n type = \\\"ipsec.1\\\"\\n+ vpn_gateway_id = \\\"vpn-gateway-id\\\"\\n }\\n \\n resource \\\"aws_vpn_connection\\\" \\\"transit\\\" {\\n@@ -23,10 +24,11 @@ resource \\\"aws_vpn_connection\\\" \\\"transit\\\" {\\n resource \\\"aws_vpn_connection\\\" \\\"vpn_connection_withUsage\\\" {\\n customer_gateway_id = \\\"dummy-customer-gateway-id2\\\"\\n type = \\\"ipsec.1\\\"\\n+ vpn_gateway_id = \\\"vpn-gateway-id\\\"\\n }\\n \\n resource \\\"aws_vpn_connection\\\" \\\"transit_withUsage\\\" {\\n customer_gateway_id = \\\"dummy-customer-gateway-id2\\\"\\n type = \\\"ipsec.1\\\"\\n transit_gateway_id = \\\"dummy-transit-gateway-id2\\\"\\n-}\\n\\\\ No newline at end of file\\n+}\\n\", \"diff --git a/lerna.json b/lerna.json\\nindex 29cb6ed..4488fb6 100644\\n--- a/lerna.json\\n+++ b/lerna.json\\n@@ -25,7 +25,6 @@\\n \\\"npmClient\\\": \\\"pnpm\\\",\\n \\\"useWorkspaces\\\": true,\\n \\\"conventionalCommits\\\": true,\\n- \\\"useNx\\\": true,\\n \\\"command\\\": {\\n \\\"version\\\": {\\n \\\"message\\\": \\\"chore(release): published new version\\\"\\ndiff --git a/nx.json b/nx.json\\nindex 00997ba..b3f2627 100644\\n--- a/nx.json\\n+++ b/nx.json\\n@@ -33,32 +33,6 @@\\n }\\n }\\n },\\n- \\\"targetDependencies\\\": {\\n- \\\"build\\\": [\\n- {\\n- \\\"target\\\": \\\"build\\\",\\n- \\\"projects\\\": \\\"dependencies\\\"\\n- }\\n- ],\\n- \\\"build:ci\\\": [\\n- {\\n- \\\"target\\\": \\\"build:ci\\\",\\n- \\\"projects\\\": \\\"dependencies\\\"\\n- }\\n- ],\\n- \\\"prepare\\\": [\\n- {\\n- \\\"target\\\": \\\"prepare\\\",\\n- \\\"projects\\\": \\\"dependencies\\\"\\n- }\\n- ],\\n- \\\"package\\\": [\\n- {\\n- \\\"target\\\": \\\"package\\\",\\n- \\\"projects\\\": \\\"dependencies\\\"\\n- }\\n- ]\\n- },\\n \\\"affected\\\": {\\n \\\"defaultBase\\\": \\\"main\\\"\\n },\\n@@ -66,5 +40,28 @@\\n \\\"@nrwl/js\\\": {\\n \\\"analyzeSourceFiles\\\": false\\n }\\n+ },\\n+ \\\"$schema\\\": \\\"./node_modules/nx/schemas/nx-schema.json\\\",\\n+ \\\"targetDefaults\\\": {\\n+ \\\"build\\\": {\\n+ \\\"dependsOn\\\": [\\n+ \\\"^build\\\"\\n+ ]\\n+ },\\n+ \\\"build:ci\\\": {\\n+ \\\"dependsOn\\\": [\\n+ \\\"^build:ci\\\"\\n+ ]\\n+ },\\n+ \\\"prepare\\\": {\\n+ \\\"dependsOn\\\": [\\n+ \\\"^prepare\\\"\\n+ ]\\n+ },\\n+ \\\"package\\\": {\\n+ \\\"dependsOn\\\": [\\n+ \\\"^package\\\"\\n+ ]\\n+ }\\n }\\n }\\n\", \"diff --git a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\\nindex ae2b1c0..8ed64e5 100644\\n--- a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\\n+++ b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\\n@@ -22,6 +22,7 @@ import io.camunda.zeebe.transport.stream.api.ClientStreamer;\\n import io.camunda.zeebe.util.VisibleForTesting;\\n import io.grpc.Status;\\n import io.grpc.StatusRuntimeException;\\n+import io.grpc.internal.SerializingExecutor;\\n import io.grpc.stub.ServerCallStreamObserver;\\n import io.grpc.stub.StreamObserver;\\n import java.util.concurrent.CompletableFuture;\\n@@ -83,12 +84,12 @@ public class ClientStreamAdapter {\\n @VisibleForTesting(\\\"Allow unit testing behavior job handling behavior\\\")\\n static final class ClientStreamConsumerImpl implements ClientStreamConsumer {\\n private final StreamObserver responseObserver;\\n- private final Executor executor;\\n+ private final SerializingExecutor executor;\\n \\n public 
ClientStreamConsumerImpl(\\n final StreamObserver responseObserver, final Executor executor) {\\n this.responseObserver = responseObserver;\\n- this.executor = executor;\\n+ this.executor = new SerializingExecutor(executor);\\n }\\n \\n @Override\\n\", \"diff --git a/delorean_mem_qe/src/column.rs b/delorean_mem_qe/src/column.rs\\nindex bc89cb2..b3df18e 100644\\n--- a/delorean_mem_qe/src/column.rs\\n+++ b/delorean_mem_qe/src/column.rs\\n@@ -537,6 +537,22 @@ impl Column {\\n }\\n }\\n \\n+ /// Materialise all of the encoded values.\\n+ pub fn all_encoded_values(&self) -> Vector {\\n+ match self {\\n+ Column::String(c) => {\\n+ let now = std::time::Instant::now();\\n+ let v = c.all_encoded_values();\\n+ log::debug!(\\\"time getting all encoded values {:?}\\\", now.elapsed());\\n+\\n+ log::debug!(\\\"dictionary {:?}\\\", c.data.dictionary());\\n+ Vector::Integer(v)\\n+ }\\n+ Column::Float(c) => Vector::Float(c.all_encoded_values()),\\n+ Column::Integer(c) => Vector::Integer(c.all_encoded_values()),\\n+ }\\n+ }\\n+\\n /// Given an encoded value for a row, materialise and return the decoded\\n /// version.\\n ///\\n@@ -986,6 +1002,10 @@ impl String {\\n self.data.encoded_values(row_ids)\\n }\\n \\n+ pub fn all_encoded_values(&self) -> Vec<i64> {\\n+ self.data.all_encoded_values()\\n+ }\\n+\\n /// Return the decoded value for an encoded ID.\\n ///\\n /// Panics if there is no decoded value for the provided id\\n@@ -1037,6 +1057,10 @@ impl Float {\\n self.data.encoded_values(row_ids)\\n }\\n \\n+ pub fn all_encoded_values(&self) -> Vec<f64> {\\n+ self.data.all_encoded_values()\\n+ }\\n+\\n pub fn scan_from(&self, row_id: usize) -> &[f64] {\\n self.data.scan_from(row_id)\\n }\\n@@ -1106,6 +1130,10 @@ impl Integer {\\n self.data.encoded_values(row_ids)\\n }\\n \\n+ pub fn all_encoded_values(&self) -> Vec<i64> {\\n+ self.data.all_encoded_values()\\n+ }\\n+\\n pub fn scan_from(&self, row_id: usize) -> &[i64] {\\n self.data.scan_from(row_id)\\n }\\ndiff --git a/delorean_mem_qe/src/encoding.rs b/delorean_mem_qe/src/encoding.rs\\nindex d6a865a..4b057cf 100644\\n--- a/delorean_mem_qe/src/encoding.rs\\n+++ b/delorean_mem_qe/src/encoding.rs\\n@@ -68,6 +68,12 @@\\n where\\n self.values(row_ids)\\n }\\n \\n+ /// Return all encoded values. 
```diff
diff --git a/delorean_mem_qe/src/column.rs b/delorean_mem_qe/src/column.rs
index bc89cb2..b3df18e 100644
--- a/delorean_mem_qe/src/column.rs
+++ b/delorean_mem_qe/src/column.rs
@@ -537,6 +537,22 @@ impl Column {
         }
     }
 
+    /// Materialise all of the encoded values.
+    pub fn all_encoded_values(&self) -> Vector {
+        match self {
+            Column::String(c) => {
+                let now = std::time::Instant::now();
+                let v = c.all_encoded_values();
+                log::debug!("time getting all encoded values {:?}", now.elapsed());
+
+                log::debug!("dictionary {:?}", c.data.dictionary());
+                Vector::Integer(v)
+            }
+            Column::Float(c) => Vector::Float(c.all_encoded_values()),
+            Column::Integer(c) => Vector::Integer(c.all_encoded_values()),
+        }
+    }
+
     /// Given an encoded value for a row, materialise and return the decoded
     /// version.
     ///
@@ -986,6 +1002,10 @@ impl String {
         self.data.encoded_values(row_ids)
     }
 
+    pub fn all_encoded_values(&self) -> Vec {
+        self.data.all_encoded_values()
+    }
+
     /// Return the decoded value for an encoded ID.
     ///
     /// Panics if there is no decoded value for the provided id
@@ -1037,6 +1057,10 @@ impl Float {
         self.data.encoded_values(row_ids)
     }
 
+    pub fn all_encoded_values(&self) -> Vec {
+        self.data.all_encoded_values()
+    }
+
     pub fn scan_from(&self, row_id: usize) -> &[f64] {
         self.data.scan_from(row_id)
     }
@@ -1106,6 +1130,10 @@ impl Integer {
         self.data.encoded_values(row_ids)
     }
 
+    pub fn all_encoded_values(&self) -> Vec {
+        self.data.all_encoded_values()
+    }
+
     pub fn scan_from(&self, row_id: usize) -> &[i64] {
         self.data.scan_from(row_id)
     }
diff --git a/delorean_mem_qe/src/encoding.rs b/delorean_mem_qe/src/encoding.rs
index d6a865a..4b057cf 100644
--- a/delorean_mem_qe/src/encoding.rs
+++ b/delorean_mem_qe/src/encoding.rs
@@ -68,6 +68,12 @@ where
         self.values(row_ids)
     }
 
+    /// Return all encoded values. For this encoding this is just the decoded
+    /// values
+    pub fn all_encoded_values(&self) -> Vec {
+        self.values.clone()
+    }
+
     // TODO(edd): fix this when added NULL support
     pub fn scan_from_until_some(&self, _row_id: usize) -> Option {
         unreachable!("to remove");
@@ -485,6 +491,26 @@ impl DictionaryRLE {
         out
     }
 
+    // values materialises a vector of references to all logical values in the
+    // encoding.
+    pub fn all_values(&mut self) -> Vec> {
+        let mut out: Vec> = Vec::with_capacity(self.total as usize);
+
+        // build reverse mapping.
+        let mut idx_value = BTreeMap::new();
+        for (k, v) in &self.entry_index {
+            idx_value.insert(v, k);
+        }
+        assert_eq!(idx_value.len(), self.entry_index.len());
+
+        for (idx, rl) in &self.run_lengths {
+            // TODO(edd): fix unwrap - we know that the value exists in map...
+            let v = idx_value.get(&idx).unwrap().as_ref();
+            out.extend(iter::repeat(v).take(*rl as usize));
+        }
+        out
+    }
+
     /// Return the decoded value for an encoded ID.
     ///
     /// Panics if there is no decoded value for the provided id
@@ -528,22 +554,13 @@ impl DictionaryRLE {
         out
     }
 
-    // values materialises a vector of references to all logical values in the
-    // encoding.
-    pub fn all_values(&mut self) -> Vec> {
-        let mut out: Vec> = Vec::with_capacity(self.total as usize);
-
-        // build reverse mapping.
-        let mut idx_value = BTreeMap::new();
-        for (k, v) in &self.entry_index {
-            idx_value.insert(v, k);
-        }
-        assert_eq!(idx_value.len(), self.entry_index.len());
+    // all_encoded_values materialises a vector of all encoded values for the
+    // column.
+    pub fn all_encoded_values(&self) -> Vec {
+        let mut out: Vec = Vec::with_capacity(self.total as usize);
 
         for (idx, rl) in &self.run_lengths {
-            // TODO(edd): fix unwrap - we know that the value exists in map...
-            let v = idx_value.get(&idx).unwrap().as_ref();
-            out.extend(iter::repeat(v).take(*rl as usize));
+            out.extend(iter::repeat(*idx as i64).take(*rl as usize));
         }
         out
     }
```
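The core of the new `DictionaryRLE::all_encoded_values` above is plain run-length expansion: each `(dictionary id, run length)` pair becomes `run length` copies of the id, so the whole column materialises without consulting the dictionary at all — which is exactly why it is cheaper than the reference-returning `all_values`. A small self-contained sketch of that expansion; the tuple layout and `i64` output type are assumptions mirroring the diff, not the project's real signatures.

```rust
use std::iter;

/// Expand (dictionary id, run length) pairs into one encoded id per row.
fn all_encoded_values(run_lengths: &[(u32, u64)], total: usize) -> Vec<i64> {
    let mut out: Vec<i64> = Vec::with_capacity(total);
    for (idx, rl) in run_lengths {
        out.extend(iter::repeat(*idx as i64).take(*rl as usize));
    }
    out
}

fn main() {
    // Three runs: id 0 for 2 rows, id 7 for 3 rows, id 0 for 1 row.
    let runs = [(0u32, 2u64), (7, 3), (0, 1)];
    assert_eq!(all_encoded_values(&runs, 6), vec![0, 0, 7, 7, 7, 0]);
}
```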
```diff
diff --git a/delorean_mem_qe/src/segment.rs b/delorean_mem_qe/src/segment.rs
index c058df0..f8c5005 100644
--- a/delorean_mem_qe/src/segment.rs
+++ b/delorean_mem_qe/src/segment.rs
@@ -228,7 +228,7 @@ impl Segment {
         group_columns: &[String],
         aggregates: &[(String, AggregateType)],
         window: i64,
-    ) -> BTreeMap, Vec<(String, Option)>> {
+    ) -> BTreeMap, Vec<(&String, &AggregateType, Option)>> {
         // Build a hash table - essentially, scan columns for matching row ids,
         // emitting the encoded value for each column and track those value
         // combinations in a hashmap with running aggregates.
@@ -242,6 +242,10 @@ impl Segment {
             assert_ne!(group_columns[group_columns.len() - 1], "time");
         }
 
+        // TODO(edd): Perf - if there is no predicate and we want entire segment
+        // then it will be a lot faster to not build filtered_row_ids and just
+        // get all encoded values for each grouping column...
+
         // filter on predicates and time
         let filtered_row_ids: croaring::Bitmap;
         if let Some(row_ids) = self.filter_by_predicates_eq(time_range, predicates) {
@@ -263,7 +267,12 @@ impl Segment {
         let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());
         for group_column in group_columns {
             if let Some(column) = self.column(&group_column) {
-                let encoded_values = column.encoded_values(&filtered_row_ids_vec);
+                let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {
+                    column.all_encoded_values()
+                } else {
+                    column.encoded_values(&filtered_row_ids_vec)
+                };
+
                 assert_eq!(
                     filtered_row_ids.cardinality() as usize,
                     encoded_values.len()
@@ -325,10 +334,10 @@ impl Segment {
             .collect::>();
 
         // hashMap is about 20% faster than BTreeMap in this case
-        let mut hash_table: HashMap<
+        let mut hash_table: BTreeMap<
             Vec,
             Vec<(&String, &AggregateType, Option)>,
-        > = HashMap::new();
+        > = BTreeMap::new();
 
         let mut aggregate_row: Vec<(&str, Option)> =
             std::iter::repeat_with(|| ("", None))
@@ -406,8 +415,10 @@ impl Segment {
             }
             processed_rows += 1;
         }
+        // println!("groups: {:?}", hash_table.len());
         log::debug!("({:?} rows processed) {:?}", processed_rows, hash_table);
         BTreeMap::new()
+        // hash_table
     }
 
     pub fn aggregate_by_group_using_sort(
@@ -451,7 +462,11 @@ impl Segment {
         let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());
         for group_column in group_columns {
             if let Some(column) = self.column(&group_column) {
-                let encoded_values = column.encoded_values(&filtered_row_ids_vec);
+                let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {
+                    column.all_encoded_values()
+                } else {
+                    column.encoded_values(&filtered_row_ids_vec)
+                };
                 assert_eq!(
                     filtered_row_ids.cardinality() as usize,
                     encoded_values.len()
@@ -557,6 +572,10 @@ impl Segment {
             assert_ne!(group_columns[group_columns.len() - 1], "time");
         }
 
+        // TODO(edd): Perf - if there is no predicate and we want entire segment
+        // then it will be a lot faster to not build filtered_row_ids and just
+        // get all encoded values for each grouping column...
+
        // filter on predicates and time
         let filtered_row_ids: croaring::Bitmap;
         if let Some(row_ids) = self.filter_by_predicates_eq(time_range, predicates) {
@@ -577,7 +596,11 @@ impl Segment {
         let mut group_column_encoded_values = Vec::with_capacity(group_columns.len());
         for group_column in group_columns {
             if let Some(column) = self.column(&group_column) {
-                let encoded_values = column.encoded_values(&filtered_row_ids_vec);
+                let encoded_values = if filtered_row_ids_vec.len() == self.meta.rows {
+                    column.all_encoded_values()
+                } else {
+                    column.encoded_values(&filtered_row_ids_vec)
+                };
                 assert_eq!(
                     filtered_row_ids.cardinality() as usize,
                     encoded_values.len()
@@ -709,6 +732,7 @@ impl Segment {
             aggregates: group_key_aggregates,
         });
 
+        // println!("groups: {:?}", results.len());
         log::debug!("({:?} rows processed) {:?}", processed_rows, results);
         // results
         vec![]
```
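Two things are worth calling out in the segment changes above. First, the repeated `filtered_row_ids_vec.len() == self.meta.rows` check is a fast path: when the predicate keeps every row, the code takes the whole encoded column in one pass instead of doing a per-row-id lookup. Second, the `HashMap` to `BTreeMap` swap trades the roughly 20% speed noted in the (now stale) comment for deterministic key ordering. A stripped-down sketch of the fast-path dispatch, with a toy `IntColumn` standing in for the project's real column types:

```rust
struct IntColumn {
    encoded: Vec<i64>,
}

impl IntColumn {
    /// Cheap full scan: clone the whole encoded vector.
    fn all_encoded_values(&self) -> Vec<i64> {
        self.encoded.clone()
    }

    /// Per-row lookup: materialise only the requested rows.
    fn encoded_values(&self, row_ids: &[usize]) -> Vec<i64> {
        row_ids.iter().map(|&r| self.encoded[r]).collect()
    }
}

fn encoded_for_group(col: &IntColumn, row_ids: &[usize], total_rows: usize) -> Vec<i64> {
    if row_ids.len() == total_rows {
        col.all_encoded_values() // predicate kept everything: skip lookups
    } else {
        col.encoded_values(row_ids)
    }
}

fn main() {
    let col = IntColumn { encoded: vec![3, 3, 9, 3] };
    assert_eq!(encoded_for_group(&col, &[0, 1, 2, 3], 4), vec![3, 3, 9, 3]);
    assert_eq!(encoded_for_group(&col, &[2], 4), vec![9]);
}
```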
b/packages/extension-horizontal-rule/src/horizontal-rule.ts\\nindex 6f583e1..c905b63 100644\\n--- a/packages/extension-horizontal-rule/src/horizontal-rule.ts\\n+++ b/packages/extension-horizontal-rule/src/horizontal-rule.ts\\n@@ -49,15 +49,14 @@ export const HorizontalRule = Node.create({\\n // set cursor after horizontal rule\\n .command(({ tr, dispatch }) => {\\n if (dispatch) {\\n- const { parent, pos } = tr.selection.$from\\n- const posAfter = pos + 1\\n- const nodeAfter = tr.doc.nodeAt(posAfter)\\n+ const { $to } = tr.selection\\n+ const posAfter = $to.end()\\n \\n- if (nodeAfter) {\\n- tr.setSelection(TextSelection.create(tr.doc, posAfter))\\n+ if ($to.nodeAfter) {\\n+ tr.setSelection(TextSelection.create(tr.doc, $to.pos))\\n } else {\\n // add node after horizontal rule if it\\u2019s the end of the document\\n- const node = parent.type.contentMatch.defaultType?.create()\\n+ const node = $to.parent.type.contentMatch.defaultType?.create()\\n \\n if (node) {\\n tr.insert(posAfter, node)\\n\", \"diff --git a/package.json b/package.json\\nindex c8051d2..b0a97fb 100644\\n--- a/package.json\\n+++ b/package.json\\n@@ -60,6 +60,7 @@\\n \\\"babel-cli\\\": \\\"^6.16.0\\\",\\n \\\"babel-core\\\": \\\"^6.16.0\\\",\\n \\\"babel-eslint\\\": \\\"^7.0.0\\\",\\n+ \\\"babel-loader\\\": \\\"^6.2.5\\\",\\n \\\"babel-plugin-transform-class-properties\\\": \\\"^6.10.2\\\",\\n \\\"babel-plugin-transform-flow-strip-types\\\": \\\"^6.14.0\\\",\\n \\\"babel-preset-es2015-node6\\\": \\\"^0.3.0\\\",\\n@@ -82,6 +83,7 @@\\n \\\"eslint-plugin-react\\\": \\\"^6.3.0\\\",\\n \\\"flow-bin\\\": \\\"^0.33.0\\\",\\n \\\"jsdom\\\": \\\"^9.4.2\\\",\\n+ \\\"json-loader\\\": \\\"^0.5.4\\\",\\n \\\"jsx-chai\\\": \\\"^4.0.0\\\",\\n \\\"mocha\\\": \\\"^3.0.2\\\",\\n \\\"mock-require\\\": \\\"^1.3.0\\\",\\n@@ -91,6 +93,8 @@\\n \\\"rimraf\\\": \\\"^2.5.2\\\",\\n \\\"sinon\\\": \\\"^1.17.6\\\",\\n \\\"sinon-chai\\\": \\\"^2.8.0\\\",\\n- \\\"watch\\\": \\\"^1.0.0\\\"\\n+ \\\"source-map-support\\\": \\\"^0.4.3\\\",\\n+ \\\"watch\\\": \\\"^1.0.0\\\",\\n+ \\\"webpack\\\": \\\"^1.13.2\\\"\\n }\\n }\\ndiff --git a/webpack.config.js b/webpack.config.js\\nnew file mode 100644\\nindex 0000000..0ca6da1\\n--- /dev/null\\n+++ b/webpack.config.js\\n@@ -0,0 +1,44 @@\\n+const webpack = require('webpack');\\n+const path = require('path');\\n+const fs = require('fs');\\n+\\n+const nodeModules = {\\n+ zmq: 'commonjs zmq',\\n+ jmp: 'commonjs jmp',\\n+ github: 'commonjs github',\\n+};\\n+\\n+module.exports = {\\n+ entry: './src/notebook/index.js',\\n+ target: 'electron-renderer',\\n+ output: {\\n+ path: path.join(__dirname, 'app', 'build'),\\n+ filename: 'webpacked-notebook.js'\\n+ },\\n+ module: {\\n+ loaders: [\\n+ { test: /\\\\.js$/, exclude: /node_modules/, loaders: ['babel'] },\\n+ { test: /\\\\.json$/, loader: 'json-loader' },\\n+ ]\\n+ },\\n+ resolve: {\\n+ extensions: ['', '.js', '.jsx'],\\n+ root: path.join(__dirname, 'app'),\\n+ // Webpack 1\\n+ modulesDirectories: [\\n+ path.resolve(__dirname, 'app', 'node_modules'),\\n+ path.resolve(__dirname, 'node_modules'),\\n+ ],\\n+ // Webpack 2\\n+ modules: [\\n+ path.resolve(__dirname, 'app', 'node_modules'),\\n+ ],\\n+ },\\n+ externals: nodeModules,\\n+ plugins: [\\n+ new webpack.IgnorePlugin(/\\\\.(css|less)$/),\\n+ new webpack.BannerPlugin('require(\\\"source-map-support\\\").install();',\\n+ { raw: true, entryOnly: false })\\n+ ],\\n+ devtool: 'sourcemap'\\n+};\\n\", \"diff --git a/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java 
b/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java\\nindex b2dfb98..21eaf6d 100644\\n--- a/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java\\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/api/BackupManager.java\\n@@ -42,4 +42,6 @@ public interface BackupManager {\\n \\n /** Close Backup manager */\\n ActorFuture closeAsync();\\n+\\n+ void failInProgressBackup(long lastCheckpointId);\\n }\\ndiff --git a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java\\nindex a1e1319..33149ae 100644\\n--- a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java\\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupService.java\\n@@ -16,6 +16,7 @@ import io.camunda.zeebe.scheduler.future.ActorFuture;\\n import io.camunda.zeebe.scheduler.future.CompletableActorFuture;\\n import io.camunda.zeebe.snapshots.PersistedSnapshotStore;\\n import java.nio.file.Path;\\n+import java.util.List;\\n import java.util.function.Predicate;\\n import org.slf4j.Logger;\\n import org.slf4j.LoggerFactory;\\n@@ -31,11 +32,13 @@ public final class BackupService extends Actor implements BackupManager {\\n private final PersistedSnapshotStore snapshotStore;\\n private final Path segmentsDirectory;\\n private final Predicate isSegmentsFile;\\n+ private List partitionMembers;\\n \\n public BackupService(\\n final int nodeId,\\n final int partitionId,\\n final int numberOfPartitions,\\n+ final List partitionMembers,\\n final PersistedSnapshotStore snapshotStore,\\n final Predicate isSegmentsFile,\\n final Path segmentsDirectory) {\\n@@ -48,6 +51,7 @@ public final class BackupService extends Actor implements BackupManager {\\n snapshotStore,\\n segmentsDirectory,\\n isSegmentsFile);\\n+ this.partitionMembers = partitionMembers;\\n }\\n \\n public BackupService(\\n@@ -122,6 +126,12 @@ public final class BackupService extends Actor implements BackupManager {\\n new UnsupportedOperationException(\\\"Not implemented\\\"));\\n }\\n \\n+ @Override\\n+ public void failInProgressBackup(final long lastCheckpointId) {\\n+ internalBackupManager.failInProgressBackups(\\n+ partitionId, lastCheckpointId, partitionMembers, actor);\\n+ }\\n+\\n private BackupIdentifierImpl getBackupId(final long checkpointId) {\\n return new BackupIdentifierImpl(nodeId, partitionId, checkpointId);\\n }\\ndiff --git a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java\\nindex e462dd5..f6d76b6 100644\\n--- a/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java\\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/management/BackupServiceImpl.java\\n@@ -9,16 +9,23 @@ package io.camunda.zeebe.backup.management;\\n \\n import io.camunda.zeebe.backup.api.BackupIdentifier;\\n import io.camunda.zeebe.backup.api.BackupStatus;\\n+import io.camunda.zeebe.backup.api.BackupStatusCode;\\n import io.camunda.zeebe.backup.api.BackupStore;\\n+import io.camunda.zeebe.backup.common.BackupIdentifierImpl;\\n+import io.camunda.zeebe.backup.processing.state.CheckpointState;\\n import io.camunda.zeebe.scheduler.ConcurrencyControl;\\n import io.camunda.zeebe.scheduler.future.ActorFuture;\\n import io.camunda.zeebe.scheduler.future.CompletableActorFuture;\\n+import java.util.Collection;\\n import java.util.HashSet;\\n import java.util.Set;\\n import java.util.function.BiConsumer;\\n 
import java.util.function.Consumer;\\n+import org.slf4j.Logger;\\n+import org.slf4j.LoggerFactory;\\n \\n final class BackupServiceImpl {\\n+ private static final Logger LOG = LoggerFactory.getLogger(BackupServiceImpl.class);\\n private final Set backupsInProgress = new HashSet<>();\\n private final BackupStore backupStore;\\n private ConcurrencyControl concurrencyControl;\\n@@ -138,4 +145,48 @@ final class BackupServiceImpl {\\n }));\\n return future;\\n }\\n+\\n+ void failInProgressBackups(\\n+ final int partitionId,\\n+ final long lastCheckpointId,\\n+ final Collection brokers,\\n+ final ConcurrencyControl executor) {\\n+ if (lastCheckpointId != CheckpointState.NO_CHECKPOINT) {\\n+ executor.run(\\n+ () -> {\\n+ final var backupIds =\\n+ brokers.stream()\\n+ .map(b -> new BackupIdentifierImpl(b, partitionId, lastCheckpointId))\\n+ .toList();\\n+ // Fail backups initiated by previous leaders\\n+ backupIds.forEach(this::failInProgressBackup);\\n+ });\\n+ }\\n+ }\\n+\\n+ private void failInProgressBackup(final BackupIdentifier backupId) {\\n+ backupStore\\n+ .getStatus(backupId)\\n+ .thenAccept(\\n+ status -> {\\n+ if (status.statusCode() == BackupStatusCode.IN_PROGRESS) {\\n+ LOG.debug(\\n+ \\\"The backup {} initiated by previous leader is still in progress. Marking it as failed.\\\",\\n+ backupId);\\n+ backupStore\\n+ .markFailed(backupId)\\n+ .thenAccept(ignore -> LOG.trace(\\\"Marked backup {} as failed.\\\", backupId))\\n+ .exceptionally(\\n+ failed -> {\\n+ LOG.debug(\\\"Failed to mark backup {} as failed\\\", backupId, failed);\\n+ return null;\\n+ });\\n+ }\\n+ })\\n+ .exceptionally(\\n+ error -> {\\n+ LOG.debug(\\\"Failed to retrieve status of backup {}\\\", backupId);\\n+ return null;\\n+ });\\n+ }\\n }\\ndiff --git a/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java b/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java\\nindex c83fdc1..2899d4d 100644\\n--- a/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java\\n+++ b/backup/src/main/java/io/camunda/zeebe/backup/processing/CheckpointRecordsProcessor.java\\n@@ -14,20 +14,24 @@ import io.camunda.zeebe.backup.processing.state.DbCheckpointState;\\n import io.camunda.zeebe.engine.api.ProcessingResult;\\n import io.camunda.zeebe.engine.api.ProcessingResultBuilder;\\n import io.camunda.zeebe.engine.api.ProcessingScheduleService;\\n+import io.camunda.zeebe.engine.api.ReadonlyStreamProcessorContext;\\n import io.camunda.zeebe.engine.api.RecordProcessor;\\n import io.camunda.zeebe.engine.api.RecordProcessorContext;\\n+import io.camunda.zeebe.engine.api.StreamProcessorLifecycleAware;\\n import io.camunda.zeebe.engine.api.TypedRecord;\\n import io.camunda.zeebe.protocol.impl.record.value.management.CheckpointRecord;\\n import io.camunda.zeebe.protocol.record.ValueType;\\n import io.camunda.zeebe.protocol.record.intent.management.CheckpointIntent;\\n import java.time.Duration;\\n+import java.util.List;\\n import java.util.Set;\\n import java.util.concurrent.CopyOnWriteArraySet;\\n import org.slf4j.Logger;\\n import org.slf4j.LoggerFactory;\\n \\n /** Process and replays records related to Checkpoint. 
*/\\n-public final class CheckpointRecordsProcessor implements RecordProcessor {\\n+public final class CheckpointRecordsProcessor\\n+ implements RecordProcessor, StreamProcessorLifecycleAware {\\n \\n private static final Logger LOG = LoggerFactory.getLogger(CheckpointRecordsProcessor.class);\\n \\n@@ -62,6 +66,8 @@ public final class CheckpointRecordsProcessor implements RecordProcessor {\\n checkpointListeners.forEach(\\n listener -> listener.onNewCheckpointCreated(checkpointState.getCheckpointId()));\\n }\\n+\\n+ recordProcessorContext.addLifecycleListeners(List.of(this));\\n }\\n \\n @Override\\n@@ -126,4 +132,12 @@ public final class CheckpointRecordsProcessor implements RecordProcessor {\\n });\\n }\\n }\\n+\\n+ @Override\\n+ public void onRecovered(final ReadonlyStreamProcessorContext context) {\\n+ // After a leader change, the new leader will not continue taking the backup initiated by\\n+ // previous leader. So mark them as failed, so that the users do not wait forever for it to be\\n+ // completed.\\n+ backupManager.failInProgressBackup(checkpointState.getCheckpointId());\\n+ }\\n }\\ndiff --git a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java\\nindex 3424e19..591e17b 100644\\n--- a/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java\\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/system/partitions/impl/steps/BackupServiceTransitionStep.java\\n@@ -7,6 +7,7 @@\\n */\\n package io.camunda.zeebe.broker.system.partitions.impl.steps;\\n \\n+import io.atomix.cluster.MemberId;\\n import io.atomix.raft.RaftServer.Role;\\n import io.camunda.zeebe.backup.api.BackupManager;\\n import io.camunda.zeebe.backup.management.BackupService;\\n@@ -17,6 +18,7 @@ import io.camunda.zeebe.journal.file.SegmentFile;\\n import io.camunda.zeebe.scheduler.future.ActorFuture;\\n import io.camunda.zeebe.scheduler.future.CompletableActorFuture;\\n import java.nio.file.Path;\\n+import java.util.List;\\n import java.util.function.Predicate;\\n \\n public final class BackupServiceTransitionStep implements PartitionTransitionStep {\\n@@ -69,6 +71,7 @@ public final class BackupServiceTransitionStep implements PartitionTransitionSte\\n context.getNodeId(),\\n context.getPartitionId(),\\n context.getBrokerCfg().getCluster().getPartitionsCount(),\\n+ getPartitionMembers(context),\\n context.getPersistedSnapshotStore(),\\n isSegmentsFile,\\n context.getRaftPartition().dataDirectory().toPath());\\n@@ -90,4 +93,12 @@ public final class BackupServiceTransitionStep implements PartitionTransitionSte\\n });\\n return installed;\\n }\\n+\\n+ // Brokers which are members of this partition's replication group\\n+ private static List getPartitionMembers(final PartitionTransitionContext context) {\\n+ return context.getRaftPartition().members().stream()\\n+ .map(MemberId::id)\\n+ .map(Integer::parseInt)\\n+ .toList();\\n+ }\\n }\\n\", \"diff --git a/appveyor.yml b/appveyor.yml\\nindex 9aca21e..8b54543 100644\\n--- a/appveyor.yml\\n+++ b/appveyor.yml\\n@@ -146,12 +146,12 @@ build_script:\\n - ps: >-\\n if ($env:GN_CONFIG -eq 'release') {\\n python electron\\\\script\\\\zip-symbols.py\\n- appveyor PushArtifact out/Default/symbols.zip\\n+ appveyor-retry appveyor PushArtifact out/Default/symbols.zip\\n } else {\\n # It's useful to have pdb files when debugging testing builds that are\\n # built on CI.\\n 7z a 
pdb.zip out\\\\Default\\\\*.pdb\\n- appveyor PushArtifact pdb.zip\\n+ appveyor-retry appveyor PushArtifact pdb.zip\\n }\\n - python electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.win.%TARGET_ARCH%.manifest\\n test_script:\\n\", \"diff --git a/README.md b/README.md\\nindex d944d22..5099f03 100644\\n--- a/README.md\\n+++ b/README.md\\n@@ -10,9 +10,8 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\\n \\n \\n \\n-* [Motivation](#motivation)\\n+* [Introduction](#introduction)\\n * [Installation](#installation)\\n- + [Setting up a quick project](#setting-up-a-quick-project)\\n * [Usage](#usage)\\n + [Creating stores](#creating-stores)\\n + [Creating reactive views](#creating-reactive-views)\\n@@ -35,12 +34,14 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\\n Easy State consists of two wrapper functions only. `store` creates state stores and `view` creates reactive components, which re-render whenever state stores are mutated. The rest is just plain JavaScript.\\n \\n ```js\\n-import React, from 'react'\\n+import React from 'react'\\n import { store, view } from 'react-easy-state'\\n \\n+// stores are normal objects\\n const clock = store({ time: new Date() })\\n setInterval(() => clock.time = new Date(), 1000)\\n \\n+// reactive components re-render on store mutations\\n function ClockComp () {\\n return
{clock.time}
\\n }\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"34d80114704679118e9bb6058e0d6c7aa03fd4b5\", \"4ab28fc2e63e975a0c77e18ae644f34fa5f8771a\", \"fb83ef33b699fd966486a922ba1ade4cf8e55858\", \"7152173d26293f4638920b17ce2dfa8ae995193b\", \"b8a664c1b10f4e30a3e221a14211a3cdaf90b7f4\"]"},"types":{"kind":"string","value":"[\"fix\", \"build\", \"feat\", \"ci\", \"docs\"]"}}},{"rowIdx":1372,"cells":{"commit_message":{"kind":"string","value":"test,group example,do not check mkdocs for older versions used in deployments,import flux-lsp v0.5.21,add method to extract snapshot name from filename\n\nalso corrected pattern, where the period was meant to match a period, not any\ncharacter.\n\nrelated to zeebe-io/zeebe#876"},"diff":{"kind":"string","value":"[\"diff --git a/tests/playwright/pages/Dashboard/Command/CmdKPage.ts b/tests/playwright/pages/Dashboard/Command/CmdKPage.ts\\nindex 5ac62b2..0457243 100644\\n--- a/tests/playwright/pages/Dashboard/Command/CmdKPage.ts\\n+++ b/tests/playwright/pages/Dashboard/Command/CmdKPage.ts\\n@@ -21,6 +21,7 @@ export class CmdK extends BasePage {\\n async searchText(text: string) {\\n await this.dashboardPage.rootPage.fill('.cmdk-input', text);\\n await this.rootPage.keyboard.press('Enter');\\n+ await this.rootPage.keyboard.press('Enter');\\n }\\n \\n async isCmdKVisible() {\\n\", \"diff --git a/src/build/arg_group.rs b/src/build/arg_group.rs\\nindex 5201e97..e1b1991 100644\\n--- a/src/build/arg_group.rs\\n+++ b/src/build/arg_group.rs\\n@@ -43,7 +43,7 @@ use crate::util::{Id, Key};\\n /// .arg(\\\"--minor 'auto increase minor'\\\")\\n /// .arg(\\\"--patch 'auto increase patch'\\\")\\n /// .group(ArgGroup::with_name(\\\"vers\\\")\\n-/// .args(&[\\\"set-ver\\\", \\\"major\\\", \\\"minor\\\",\\\"patch\\\"])\\n+/// .args(&[\\\"set-ver\\\", \\\"major\\\", \\\"minor\\\", \\\"patch\\\"])\\n /// .required(true))\\n /// .try_get_matches_from(vec![\\\"app\\\", \\\"--major\\\", \\\"--patch\\\"]);\\n /// // Because we used two args in the group it's an error\\n\", \"diff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\\nindex 57d94a4..04de03b 100644\\n--- a/.github/workflows/ibis-docs-lint.yml\\n+++ b/.github/workflows/ibis-docs-lint.yml\\n@@ -206,7 +206,7 @@ jobs:\\n - name: build and push dev docs\\n run: |\\n nix develop --ignore-environment -c \\\\\\n- mkdocs gh-deploy --message 'docs: ibis@${{ github.sha }}'\\n+ mkdocs gh-deploy --message 'docs: ibis@${{ github.sha }}' --ignore-version\\n \\n simulate_release:\\n runs-on: ubuntu-latest\\n\", \"diff --git a/ui/package.json b/ui/package.json\\nindex 7a44aad..a36fc3d 100644\\n--- a/ui/package.json\\n+++ b/ui/package.json\\n@@ -134,7 +134,7 @@\\n \\\"dependencies\\\": {\\n \\\"@influxdata/clockface\\\": \\\"2.3.4\\\",\\n \\\"@influxdata/flux\\\": \\\"^0.5.1\\\",\\n- \\\"@influxdata/flux-lsp-browser\\\": \\\"0.5.20\\\",\\n+ \\\"@influxdata/flux-lsp-browser\\\": \\\"0.5.21\\\",\\n \\\"@influxdata/giraffe\\\": \\\"0.29.0\\\",\\n \\\"@influxdata/influx\\\": \\\"0.5.5\\\",\\n \\\"@influxdata/influxdb-templates\\\": \\\"0.9.0\\\",\\ndiff --git a/ui/yarn.lock b/ui/yarn.lock\\nindex 99ae766..e6e2a47 100644\\n--- a/ui/yarn.lock\\n+++ b/ui/yarn.lock\\n@@ -752,10 +752,10 @@\\n resolved \\\"https://registry.yarnpkg.com/@influxdata/clockface/-/clockface-2.3.4.tgz#9c496601253e1d49cbeae29a7b9cfb54862785f6\\\"\\n integrity sha512-mmz3YElK8Ho+1onEafuas6sVhIT638JA4NbDTO3bVJgK1TG7AnU4rQP+c6fj7vZSfvrIwtOwGaMONJTaww5o6w==\\n 
\\n-\\\"@influxdata/flux-lsp-browser@0.5.20\\\":\\n- version \\\"0.5.20\\\"\\n- resolved \\\"https://registry.yarnpkg.com/@influxdata/flux-lsp-browser/-/flux-lsp-browser-0.5.20.tgz#150d261bab869e130f6d00ee73ea4e859e8969e4\\\"\\n- integrity sha512-gUy19t/QndkJPmyv7Lb56zXxaW5v7R9TslTHt0hB0GJjo7lmYkRfkD7DELdFHrD2e/CLtcNQBnczIMIGkII8Bw==\\n+\\\"@influxdata/flux-lsp-browser@0.5.21\\\":\\n+ version \\\"0.5.21\\\"\\n+ resolved \\\"https://registry.yarnpkg.com/@influxdata/flux-lsp-browser/-/flux-lsp-browser-0.5.21.tgz#d5632f45e925c09bae9501a00fbef2ed55567f9e\\\"\\n+ integrity sha512-lcUwKX1yj0QqGiusQFOVi7UPsvp6+qNX7Cwf9qqS5/dRwoh7c++nFVRdGNrSWlsbyRrPaAWBoZWEnghSnIf6DQ==\\n \\n \\\"@influxdata/flux@^0.5.1\\\":\\n version \\\"0.5.1\\\"\\n\", \"diff --git a/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java b/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\\nindex d8f4d89..e54e85a 100644\\n--- a/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\\n+++ b/logstreams/src/main/java/io/zeebe/logstreams/impl/snapshot/fs/FsSnapshotStorageConfiguration.java\\n@@ -23,8 +23,9 @@ public class FsSnapshotStorageConfiguration\\n {\\n protected static final String CHECKSUM_ALGORITHM = \\\"SHA1\\\";\\n \\n- protected static final String SNAPSHOT_FILE_NAME_TEMPLATE = \\\"%s\\\" + File.separatorChar + \\\"%s-%d.snapshot\\\";\\n- protected static final String SNAPSHOT_FILE_NAME_PATTERN = \\\"%s-(\\\\\\\\d+).snapshot\\\";\\n+ protected static final String SNAPSHOT_FILE_NAME_TEMPLATE = \\\"%s-%d.snapshot\\\";\\n+ protected static final String SNAPSHOT_FILE_PATH_TEMPLATE = \\\"%s\\\" + File.separatorChar + SNAPSHOT_FILE_NAME_TEMPLATE;\\n+ protected static final String SNAPSHOT_FILE_NAME_PATTERN = \\\"%s-(\\\\\\\\d+)\\\\\\\\.snapshot\\\";\\n \\n protected static final String CHECKSUM_FILE_NAME_TEMPLATE = \\\"%s\\\" + File.separatorChar + \\\"%s-%d.\\\" + CHECKSUM_ALGORITHM.toLowerCase();\\n \\n@@ -50,7 +51,7 @@ public class FsSnapshotStorageConfiguration\\n \\n public String snapshotFileName(String name, long logPosition)\\n {\\n- return String.format(SNAPSHOT_FILE_NAME_TEMPLATE, rootPath, name, logPosition);\\n+ return String.format(SNAPSHOT_FILE_PATH_TEMPLATE, rootPath, name, logPosition);\\n }\\n \\n public String checksumFileName(String name, long logPosition)\\n@@ -86,7 +87,7 @@ public class FsSnapshotStorageConfiguration\\n return String.format(CHECKSUM_CONTENT_TEMPLATE, checksum, dataFileName);\\n }\\n \\n- public String extractDigetsFromChecksumContent(String content)\\n+ public String extractDigestFromChecksumContent(String content)\\n {\\n final int indexOfSeparator = content.indexOf(CHECKSUM_CONTENT_SEPARATOR);\\n if (indexOfSeparator < 0)\\n@@ -108,9 +109,18 @@ public class FsSnapshotStorageConfiguration\\n return content.substring(indexOfSeparator + CHECKSUM_CONTENT_SEPARATOR.length());\\n }\\n \\n+ public String getSnapshotNameFromFileName(final String fileName)\\n+ {\\n+ final String suffixPattern = String.format(SNAPSHOT_FILE_NAME_PATTERN, \\\"\\\");\\n+ final Pattern pattern = Pattern.compile(suffixPattern);\\n+ final String[] parts = pattern.split(fileName);\\n+\\n+ return parts[0];\\n+ }\\n+\\n public String getSnapshotFileNameTemplate()\\n {\\n- return SNAPSHOT_FILE_NAME_TEMPLATE;\\n+ return SNAPSHOT_FILE_PATH_TEMPLATE;\\n }\\n \\n public String 
getChecksumFileNameTemplate()\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"990699ff4a84a5bac3abfecbec002f30e2714de9\", \"9849430b11b92ae58d94cfe4d0b06313c7eab550\", \"21228c55b7045d9b2225f65e6231184ff332b071\", \"bfe32bf10e9b6d699f694fbd095af0b3f2e6275f\", \"7ab965c55d0e98fdb6179577d0db56599675e400\"]"},"types":{"kind":"string","value":"[\"test\", \"docs\", \"ci\", \"build\", \"feat\"]"}}},{"rowIdx":1373,"cells":{"commit_message":{"kind":"string","value":"add `to_sql`\n\nCo-authored-by: Gil Forsyth ,add tests for ProfilePage methods,simplyfy statement,Add the select function for logicflow,abort parallel stages if one failed"},"diff":{"kind":"string","value":"[\"diff --git a/docs/api/expressions/top_level.md b/docs/api/expressions/top_level.md\\nindex efaffbd..34b529e 100644\\n--- a/docs/api/expressions/top_level.md\\n+++ b/docs/api/expressions/top_level.md\\n@@ -28,7 +28,7 @@ These methods and objects are available directly in the `ibis` module.\\n ::: ibis.or_\\n ::: ibis.param\\n ::: ibis.show_sql\\n-::: ibis.sql\\n+::: ibis.to_sql\\n ::: ibis.random\\n ::: ibis.range_window\\n ::: ibis.row_number\\n\", \"diff --git a/client/src/components/Profile/PreScreeningIviewCard.tsx b/client/src/components/Profile/PreScreeningIviewCard.tsx\\nindex f84392a..2031203 100644\\n--- a/client/src/components/Profile/PreScreeningIviewCard.tsx\\n+++ b/client/src/components/Profile/PreScreeningIviewCard.tsx\\n@@ -27,7 +27,7 @@ type State = {\\n isPreScreeningIviewModalVisible: boolean;\\n };\\n \\n-class CoreJSIviewsCard extends React.PureComponent {\\n+class PreScreeningIviewsCard extends React.PureComponent {\\n state = {\\n courseIndex: 0,\\n isPreScreeningIviewModalVisible: false,\\n@@ -98,4 +98,4 @@ class CoreJSIviewsCard extends React.PureComponent {\\n }\\n }\\n \\n-export default CoreJSIviewsCard;\\n+export default PreScreeningIviewsCard;\\ndiff --git a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\\nindex 7b73c3f..54b378c 100644\\n--- a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\\n+++ b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\\n@@ -1,7 +1,7 @@\\n // Jest Snapshot v1, https://goo.gl/fbAQLP\\n \\n exports[`PreScreeningIviewCard Should render correctly 1`] = `\\n-\\n \\n \\n-\\n+\\n `;\\ndiff --git a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\\nindex 079d966..95f3e49 100644\\n--- a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\\n+++ b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\\n@@ -4,7 +4,6 @@ import { shallowToJson } from 'enzyme-to-json';\\n import { NextRouter } from 'next/router';\\n import { Session } from 'components/withSession';\\n import { ProfilePage } from '../index';\\n-// import { GeneralInfo } from '../../../../../common/models/profile';\\n \\n jest.mock('next/config', () => () => ({}));\\n jest.mock('services/user', () => ({\\n@@ -12,80 +11,378 @@ jest.mock('services/user', () => ({\\n getProfileInfo() {\\n return jest.fn();\\n }\\n+ saveProfileInfo() {\\n+ return jest.fn();\\n+ }\\n },\\n }),\\n );\\n \\n-describe('ProfilePage', () => {\\n- const profile = {\\n- generalInfo: {\\n- name: 'Dzmitry Petrov',\\n- githubId: 'petrov',\\n- aboutMyself: 'Test',\\n+const profile = {\\n+ permissionsSettings: {\\n+ 
isProfileVisible: { all: true },\\n+ isAboutVisible: { mentor: true, student: false, all: false },\\n+ isEducationVisible: { mentor: true, student: false, all: false },\\n+ isEnglishVisible: { student: false, all: false },\\n+ isEmailVisible: { student: false, all: false },\\n+ isTelegramVisible: { student: false, all: false },\\n+ isSkypeVisible: { student: false, all: false },\\n+ isPhoneVisible: { student: false, all: false },\\n+ isContactsNotesVisible: { student: true, all: false },\\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\\n+ isStudentStatsVisible: { student: false, all: false },\\n+ },\\n+ generalInfo: {\\n+ aboutMyself: 'Test',\\n+ educationHistory: [{\\n+ graduationYear: '2019',\\n+ faculty: 'TT',\\n+ university: 'Test',\\n+ }],\\n+ englishLevel: 'a2+',\\n+ locationId: 456,\\n+ locationName: 'Brest',\\n+ },\\n+ contacts: {},\\n+ mentorStats: [\\n+ {},\\n+ ],\\n+ studentStats: [\\n+ {\\n+ courseFullName: 'test',\\n+ courseName: 'test',\\n locationName: 'Minsk',\\n- locationId: '1',\\n- educationHistory: null,\\n- englishLevel: 'a2+',\\n- },\\n- permissionsSettings: {\\n- isProfileVisible: { all: true },\\n- isAboutVisible: { mentor: true, student: false, all: false },\\n- isEducationVisible: { mentor: true, student: false, all: false },\\n- isEnglishVisible: { student: false, all: false },\\n- isEmailVisible: { student: false, all: false },\\n- isTelegramVisible: { student: false, all: false },\\n- isSkypeVisible: { student: false, all: false },\\n- isPhoneVisible: { student: false, all: false },\\n- isContactsNotesVisible: { student: true, all: false },\\n- isLinkedInVisible: { mentor: true, student: false, all: false },\\n- isPublicFeedbackVisible: { mentor: true, student: true, all: false },\\n- isMentorStatsVisible: { mentor: true, student: true, all: false },\\n- isStudentStatsVisible: { student: false, all: false },\\n- },\\n- contacts: {\\n- phone: '+375292123456',\\n- email: 'petro@gmail.com',\\n- skype: 'petro:live',\\n- telegram: 'petro',\\n- notes: 'discord: @petro, instagram: @petro12',\\n- },\\n- isPermissionsSettingsChanged: true,\\n- isProfileSettingsChanged: true,\\n- };\\n- const session = {\\n- id: 2020,\\n- githubId: 'mikhama',\\n- isAdmin: true,\\n- isHirer: false,\\n- isActivist: false,\\n- roles: {\\n- 1: 'mentor',\\n- 2: 'student',\\n- 11: 'mentor',\\n- },\\n- coursesRoles: {\\n- 13: [\\n- 'manager',\\n+ tasks: [\\n+ {\\n+ interviewFormAnswers: {},\\n+ },\\n ],\\n },\\n- } as Session;\\n- const router = {\\n- query: {\\n- githubId: 'petrov',\\n- },\\n- asPath: '/#edit/',\\n- } as unknown as NextRouter;\\n+ ],\\n+ publicFeedback: [\\n+ {},\\n+ ],\\n+ stageInterviewFeedback: [\\n+ {},\\n+ ],\\n+};\\n+const session = {\\n+ id: 2020,\\n+ githubId: 'mikhama',\\n+ isAdmin: true,\\n+ isHirer: false,\\n+ isActivist: false,\\n+ roles: {\\n+ 1: 'mentor',\\n+ 2: 'student',\\n+ 11: 'mentor',\\n+ },\\n+ coursesRoles: {\\n+ 13: [\\n+ 'manager',\\n+ ],\\n+ },\\n+} as Session;\\n+const router = {\\n+ query: {\\n+ githubId: 'petrov',\\n+ },\\n+ asPath: '/#edit/',\\n+} as unknown as NextRouter;\\n+const state = {\\n+ profile,\\n+ isInitialPermissionsSettingsChanged: false,\\n+ isInitialProfileSettingsChanged: false,\\n+};\\n \\n+describe('ProfilePage', () => {\\n describe('Should render correctly', () => {\\n- it('if full info about profile is in the state', () => {\\n+ it('if full profile info is in the 
state', () => {\\n const wrapper = shallow(\\n ,\\n );\\n- wrapper.setState({ profile });\\n+ wrapper.setState(state);\\n expect(shallowToJson(wrapper)).toMatchSnapshot();\\n });\\n });\\n+\\n+ const wrapper = shallow(\\n+ ,\\n+ );\\n+ const instance = wrapper.instance();\\n+ describe('onPermissionsSettingsChange', () => {\\n+ describe('Should set state correctly', () => {\\n+ it('if permissions for student role were changed', async () => {\\n+ const event = {\\n+ target: {\\n+ checked: true,\\n+ },\\n+ }\\n+ const changedPermissionsSettings = {\\n+ permissionName: 'isEmailVisible',\\n+ role: 'student',\\n+ };\\n+ wrapper.setState(state);\\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\\n+ expect(wrapper.state().profile.permissionsSettings.isEmailVisible).toEqual({\\n+ student: true, all: false,\\n+ });\\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\\n+ });\\n+ it('if permissions for mentor role were changed', async () => {\\n+ const event = {\\n+ target: {\\n+ checked: false,\\n+ },\\n+ }\\n+ const changedPermissionsSettings = {\\n+ permissionName: 'isLinkedInVisible',\\n+ role: 'mentor',\\n+ };\\n+ wrapper.setState(state);\\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\\n+ expect(wrapper.state().profile.permissionsSettings.isLinkedInVisible).toEqual({\\n+ mentor: false, student: false, all: false,\\n+ });\\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\\n+ });\\n+ it('if permissions for all roles were changed', async () => {\\n+ const event = {\\n+ target: {\\n+ checked: true,\\n+ },\\n+ }\\n+ const changedPermissionsSettings = {\\n+ permissionName: 'isEducationVisible',\\n+ role: 'all',\\n+ };\\n+ wrapper.setState(state);\\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\\n+ expect(wrapper.state().profile.permissionsSettings.isEducationVisible).toEqual({\\n+ mentor: true, student: true, all: true,\\n+ });\\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\\n+ });\\n+ });\\n+ });\\n+ describe('onProfileSettingsChange', () => {\\n+ describe('Should set state correctly', () => {\\n+ it('if \\\"profile.generalInfo.location\\\" was changed', async () => {\\n+ const event = {\\n+ id: 123,\\n+ name: 'Minsk',\\n+ }\\n+ const path = 'generalInfo.location';\\n+ wrapper.setState(state);\\n+ await instance.onProfileSettingsChange(event, path);\\n+ expect(wrapper.state().profile.generalInfo.locationId).toBe(123);\\n+ expect(wrapper.state().profile.generalInfo.locationName).toBe('Minsk');\\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\\n+ });\\n+ it('if \\\"profile.generalInfo.englishLevel\\\" was changed', async () => {\\n+ const event = 'b2+';\\n+ const path = 'generalInfo.englishLevel';\\n+ wrapper.setState(state);\\n+ await instance.onProfileSettingsChange(event, path);\\n+ expect(wrapper.state().profile.generalInfo.englishLevel).toBe('b2+');\\n+ });\\n+ it('if field added to \\\"profile.generalInfo.educationHistory\\\"', async () => {\\n+ const event = {\\n+ type: 'add',\\n+ };\\n+ const path = 'generalInfo.educationHistory';\\n+ wrapper.setState(state);\\n+ await instance.onProfileSettingsChange(event, path);\\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([\\n+ {\\n+ graduationYear: '2019',\\n+ faculty: 'TT',\\n+ university: 'Test',\\n+ },\\n+ {\\n+ graduationYear: null,\\n+ faculty: null,\\n+ university: null,\\n+ },\\n+ ]);\\n+ 
expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\\n+ });\\n+ it('if field deleted from \\\"profile.generalInfo.educationHistory\\\"', async () => {\\n+ const event = {\\n+ type: 'delete',\\n+ index: 0,\\n+ };\\n+ const path = 'generalInfo.educationHistory';\\n+ wrapper.setState(state);\\n+ await instance.onProfileSettingsChange(event, path);\\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([]);\\n+ });\\n+ it('if some other field was changed', async () => {\\n+ const event = {\\n+ target: {\\n+ value: 'Hello everyone, my name is Mike.',\\n+ }\\n+ };\\n+ const path = 'generalInfo.aboutMyself';\\n+ wrapper.setState(state);\\n+ await instance.onProfileSettingsChange(event, path);\\n+ expect(wrapper.state().profile.generalInfo.aboutMyself).toEqual('Hello everyone, my name is Mike.');\\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\\n+ });\\n+ });\\n+ });\\n+ describe('changeProfilePageMode', () => {\\n+ describe('Should set state correctly', () => {\\n+ it('if mode = \\\"edit\\\" was passed', async () => {\\n+ const mode = 'edit';\\n+ wrapper.setState({ ...state, isEditingModeEnabled: false });\\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\\n+ await instance.changeProfilePageMode(mode);\\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\\n+ });\\n+ it('if mode = \\\"view\\\" was passed', async () => {\\n+ const mode = 'view';\\n+ wrapper.setState({ ...state, isEditingModeEnabled: true });\\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\\n+ await instance.changeProfilePageMode(mode);\\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\\n+ });\\n+ });\\n+ });\\n+ describe('saveProfile', () => {\\n+ it('Should set state correctly', async () => {\\n+ const profile = {\\n+ generalInfo: {\\n+ aboutMyself: 'Hello',\\n+ educationHistory: [{\\n+ graduationYear: '2019',\\n+ faculty: 'TT',\\n+ university: 'Test',\\n+ }],\\n+ englishLevel: 'c1',\\n+ locationId: 778,\\n+ locationName: 'Hrodna',\\n+ },\\n+ contacts: {\\n+ telegram: 'test',\\n+ },\\n+ permissionsSettings: {\\n+ isProfileVisible: { all: true },\\n+ isAboutVisible: { mentor: true, student: false, all: false },\\n+ isEducationVisible: { mentor: true, student: false, all: false },\\n+ isEnglishVisible: { student: true, all: true },\\n+ isEmailVisible: { student: true, all: true },\\n+ isTelegramVisible: { student: true, all: true },\\n+ isSkypeVisible: { student: true, all: false },\\n+ isPhoneVisible: { student: true, all: false },\\n+ isContactsNotesVisible: { student: true, all: false },\\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\\n+ isStudentStatsVisible: { student: false, all: false },\\n+ },\\n+ };\\n+ wrapper.setState({\\n+ ...state,\\n+ profile,\\n+ isInitialPermissionsSettingsChanged: true,\\n+ isInitialProfileSettingsChanged: true,\\n+ });\\n+ await instance.saveProfile();\\n+ expect(wrapper.state().isSaving).toBe(false);\\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(false);\\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(false);\\n+ expect(wrapper.state().initialPermissionsSettings).toEqual(profile.permissionsSettings);\\n+ expect(wrapper.state().initialProfileSettings).toEqual(profile);\\n+ });\\n+ });\\n+ describe('hadStudentCoreJSInterview', () => {\\n+ describe('Should return', () => {\\n+ it('\\\"true\\\" if 
student has an \\\"interviewFormAnswers\\\" in one of the task', () => {\\n+ const studentStats = [\\n+ {\\n+ courseFullName: 'test',\\n+ courseName: 'test',\\n+ locationName: 'Minsk',\\n+ tasks: [\\n+ {},\\n+ {\\n+ interviewFormAnswers: {},\\n+ },\\n+ {},\\n+ {},\\n+ ],\\n+ },\\n+ ];\\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\\n+ expect(result).toBe(true);\\n+ });\\n+ it('\\\"false\\\" if student has not an \\\"interviewFormAnswers\\\" in one of the task', () => {\\n+ const studentStats = [\\n+ {\\n+ courseFullName: 'test',\\n+ courseName: 'test',\\n+ locationName: 'Minsk',\\n+ tasks: [\\n+ {},\\n+ {},\\n+ {},\\n+ ],\\n+ },\\n+ ];\\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\\n+ expect(result).toBe(false);\\n+ });\\n+ });\\n+ });\\n+ describe('getStudentCoreJSInterviews', () => {\\n+ it('Should return info about CoreJS interviews', () => {\\n+ const studentStats = [\\n+ {\\n+ courseFullName: 'test',\\n+ courseName: 'test',\\n+ locationName: 'Minsk',\\n+ tasks: [\\n+ {},\\n+ {},\\n+ {\\n+ interviewer: {\\n+ name: 'Dima Petrov',\\n+ githubId: 'dip',\\n+ },\\n+ comment: 'Test',\\n+ score: 9,\\n+ interviewFormAnswers: {},\\n+ },\\n+ {},\\n+ ],\\n+ },\\n+ ];\\n+ const result = instance.getStudentCoreJSInterviews(studentStats);\\n+ expect(result).toEqual([\\n+ {\\n+ courseFullName: 'test',\\n+ courseName: 'test',\\n+ interview: {\\n+ answers: {},\\n+ interviewer: {\\n+ name: 'Dima Petrov',\\n+ githubId: 'dip',\\n+ },\\n+ comment: 'Test',\\n+ score: 9,\\n+ },\\n+ locationName: 'Minsk',\\n+ },\\n+ ]);\\n+ });\\n+ });\\n });\\ndiff --git a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\\nindex fbd133c..729b2de 100644\\n--- a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\\n+++ b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\\n@@ -1,6 +1,6 @@\\n // Jest Snapshot v1, https://goo.gl/fbAQLP\\n \\n-exports[`ProfilePage Should render correctly if 1`] = `\\n+exports[`ProfilePage Should render correctly if full profile info is in the state 1`] = `\\n \\n \\n \\n
\\n- \\n- \\n- div.jsx-3803498300{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin-left:-16px;width:auto;}\\n- \\n- \\n- div.jsx-110195169{padding-left:16px;background-clip:padding-box;}\\n- \\n-
\\n- \\n- \\n-\\n-`;\\n-\\n-exports[`ProfilePage Should render correctly if full info about profile is in the state 1`] = `\\n-\\n- \\n- \\n- \\n- \\n- \\n \\n- \\n \\n \\n- \\n \\n \\n- \\n \\n \\n- \\n \\n \\n- \\n \\n\", \"diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\\nindex 1f48efb..06caad1 100644\\n--- a/src/Object/Merge.ts\\n+++ b/src/Object/Merge.ts\\n@@ -96,9 +96,11 @@ type ChooseMergeDeep =\\n- Or, Extends<[O1], [never]>> extends 1 // filter never\\n+ [O] extends [never]\\n ? MergeProp\\n- : LibStyle, NoList, K, OOK, style>, O, O1, style>\\n+ : [O1] extends [never]\\n+ ? MergeProp\\n+ : LibStyle, NoList, K, OOK, style>, O, O1, style>\\n \\n /**\\n @hidden\\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\\nindex 2d73784..2c8bd42 100644\\n--- a/src/Object/Patch.ts\\n+++ b/src/Object/Patch.ts\\n@@ -89,9 +89,11 @@ type ChoosePatchDeep =\\n- Or, Extends<[O1], [never]>> extends 1 // filter never\\n+ [O] extends [never]\\n ? PatchProp\\n- : LibStyle, NoList, K, OOK, style>, O, O1, style>\\n+ : [O1] extends [never]\\n+ ? PatchProp\\n+ : LibStyle, NoList, K, OOK, style>, O, O1, style>\\n \\n /**\\n @hidden\\n\", \"diff --git a/packages/core/src/LogicFlow.tsx b/packages/core/src/LogicFlow.tsx\\nindex 0d913b7..dcc59b3 100644\\n--- a/packages/core/src/LogicFlow.tsx\\n+++ b/packages/core/src/LogicFlow.tsx\\n@@ -276,6 +276,12 @@ export default class LogicFlow {\\n this.translate(-TRANSLATE_X, -TRANSLATE_Y);\\n }\\n /**\\n+ * \\u5c06\\u56fe\\u5f62\\u9009\\u4e2d\\n+ */\\n+ select(id: string) {\\n+ this.graphModel.selectElementById(id);\\n+ }\\n+ /**\\n * \\u5c06\\u56fe\\u5f62\\u5b9a\\u4f4d\\u5230\\u753b\\u5e03\\u4e2d\\u5fc3\\n * @param focusOnArgs \\u652f\\u6301\\u7528\\u6237\\u4f20\\u5165\\u56fe\\u5f62\\u5f53\\u524d\\u7684\\u5750\\u6807\\u6216id\\uff0c\\u53ef\\u4ee5\\u901a\\u8fc7type\\u6765\\u533a\\u5206\\u662f\\u8282\\u70b9\\u8fd8\\u662f\\u8fde\\u7ebf\\u7684id\\uff0c\\u4e5f\\u53ef\\u4ee5\\u4e0d\\u4f20\\uff08\\u515c\\u5e95\\uff09\\n */\\ndiff --git a/packages/core/src/model/GraphModel.ts b/packages/core/src/model/GraphModel.ts\\nindex 94d0899..10280a9 100644\\n--- a/packages/core/src/model/GraphModel.ts\\n+++ b/packages/core/src/model/GraphModel.ts\\n@@ -481,6 +481,13 @@ class GraphModel {\\n this.selectElement?.setSelected(true);\\n }\\n \\n+ @action\\n+ selectElementById(id: string) {\\n+ this.selectElement?.setSelected(false);\\n+ this.selectElement = this.getElement(id) as BaseNodeModel | BaseEdgeModel;\\n+ this.selectElement?.setSelected(true);\\n+ }\\n+\\n /* \\u4fee\\u6539\\u8fde\\u7ebf\\u7c7b\\u578b */\\n @action\\n changeEdgeType(type: string): void {\\n\", \"diff --git a/Jenkinsfile b/Jenkinsfile\\nindex 168f446..a4da961 100644\\n--- a/Jenkinsfile\\n+++ b/Jenkinsfile\\n@@ -28,6 +28,7 @@ pipeline {\\n }\\n \\n stage('Verify') {\\n+ failFast true\\n parallel {\\n stage('Tests') {\\n steps {\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"e2821a56c7d867b8b591f1777019843a2ffca797\", \"11ffd5174bd61a2939ae58d2b2d43284302ae490\", \"f86944ff00b970d7e2da48abbff43e58bdf29b99\", \"6ae067153cd2608018fd3da76bd6d00a08da4b3a\", \"28e623b294816c4e070971782a75c8697a11966f\"]"},"types":{"kind":"string","value":"[\"docs\", \"test\", \"refactor\", \"feat\", \"ci\"]"}}},{"rowIdx":1374,"cells":{"commit_message":{"kind":"string","value":"fetch git history,remove unnecessary spotless definition\n\nIt receives this already from the parent pom.,Handle different events.,fix unstable MessageCorrelationTest,add 
LICENSE"},"diff":{"kind":"string","value":"[\"diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml\\nindex 6726e35..9114eeb 100644\\n--- a/.github/workflows/ci.yml\\n+++ b/.github/workflows/ci.yml\\n@@ -172,6 +172,8 @@ jobs:\\n \\n steps:\\n - uses: actions/checkout@v3\\n+ with:\\n+ fetch-depth: 0\\n - run: corepack enable\\n - uses: actions/setup-node@v3\\n with:\\ndiff --git a/scripts/bump-edge.ts b/scripts/bump-edge.ts\\nindex aa33d76..1d1cbc9 100644\\n--- a/scripts/bump-edge.ts\\n+++ b/scripts/bump-edge.ts\\n@@ -18,7 +18,7 @@ async function main () {\\n \\n const config = await loadChangelogConfig(process.cwd())\\n \\n- const latestTag = execaSync('git', ['describe', '--tags', '--abbrev=0', 'main']).stdout\\n+ const latestTag = execaSync('git', ['describe', '--tags', '--abbrev=0']).stdout\\n \\n const commits = await getGitDiff(latestTag)\\n const bumpType = determineSemverChange(parseCommits(commits, config), config)\\n\", \"diff --git a/benchmarks/project/pom.xml b/benchmarks/project/pom.xml\\nindex 62030b6..ab87dea 100644\\n--- a/benchmarks/project/pom.xml\\n+++ b/benchmarks/project/pom.xml\\n@@ -123,11 +123,6 @@\\n \\n \\n \\n- com.diffplug.spotless\\n- spotless-maven-plugin\\n- \\n-\\n- \\n org.apache.maven.plugins\\n maven-shade-plugin\\n \\n\", \"diff --git a/src/notebook/epics/kernel-launch.js b/src/notebook/epics/kernel-launch.js\\nindex 9075d7c..9f16e67 100644\\n--- a/src/notebook/epics/kernel-launch.js\\n+++ b/src/notebook/epics/kernel-launch.js\\n@@ -113,6 +113,12 @@ export function newKernelObservable(kernelSpec: KernelInfo, cwd: string) {\\n observer.error({ type: 'ERROR', payload: error, err: true });\\n observer.complete();\\n });\\n+ spawn.on('exit', () => {\\n+ observer.complete();\\n+ });\\n+ spawn.on('disconnect', () => {\\n+ observer.complete();\\n+ });\\n });\\n });\\n }\\n\", \"diff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\\nindex 0f5fed9..796393c 100644\\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/MessageCorrelationTest.java\\n@@ -27,7 +27,6 @@ import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\\n import static org.assertj.core.api.Assertions.assertThat;\\n import static org.assertj.core.api.Assertions.entry;\\n \\n-import io.zeebe.UnstableTest;\\n import io.zeebe.broker.test.EmbeddedBrokerRule;\\n import io.zeebe.model.bpmn.Bpmn;\\n import io.zeebe.model.bpmn.BpmnModelInstance;\\n@@ -50,7 +49,6 @@ import org.agrona.DirectBuffer;\\n import org.junit.Before;\\n import org.junit.Rule;\\n import org.junit.Test;\\n-import org.junit.experimental.categories.Category;\\n import org.junit.rules.RuleChain;\\n import org.junit.runner.RunWith;\\n import org.junit.runners.Parameterized;\\n@@ -165,7 +163,7 @@ public class MessageCorrelationTest {\\n \\\"receive-message\\\", WorkflowInstanceIntent.ELEMENT_ACTIVATED);\\n \\n final SubscribedRecord messageSubscription =\\n- findMessageSubscription(testClient, MessageSubscriptionIntent.OPENED);\\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\\n assertThat(messageSubscription.valueType()).isEqualTo(ValueType.MESSAGE_SUBSCRIPTION);\\n assertThat(messageSubscription.recordType()).isEqualTo(RecordType.EVENT);\\n assertThat(messageSubscription.value())\\n@@ -244,7 +242,7 @@ public class MessageCorrelationTest {\\n final long workflowInstanceKey =\\n 
testClient.createWorkflowInstance(\\\"wf\\\", asMsgPack(\\\"orderId\\\", \\\"order-123\\\"));\\n \\n- testClient.receiveFirstWorkflowInstanceEvent(WorkflowInstanceIntent.ELEMENT_ACTIVATED);\\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\\n \\n // when\\n testClient.publishMessage(\\\"order canceled\\\", \\\"order-123\\\", asMsgPack(\\\"foo\\\", \\\"bar\\\"));\\n@@ -308,13 +306,12 @@ public class MessageCorrelationTest {\\n }\\n \\n @Test\\n- @Category(UnstableTest.class) // => https://github.com/zeebe-io/zeebe/issues/1234\\n public void shouldCorrelateMessageWithZeroTTL() throws Exception {\\n // given\\n final long workflowInstanceKey =\\n testClient.createWorkflowInstance(\\\"wf\\\", asMsgPack(\\\"orderId\\\", \\\"order-123\\\"));\\n \\n- testClient.receiveElementInState(\\\"receive-message\\\", WorkflowInstanceIntent.ELEMENT_ACTIVATED);\\n+ findMessageSubscription(MessageSubscriptionIntent.OPENED);\\n \\n // when\\n testClient.publishMessage(\\\"order canceled\\\", \\\"order-123\\\", asMsgPack(\\\"foo\\\", \\\"bar\\\"), 0);\\n@@ -499,10 +496,9 @@ public class MessageCorrelationTest {\\n .containsEntry(\\\"activityInstanceKey\\\", catchEventEntered.key());\\n }\\n \\n- private SubscribedRecord findMessageSubscription(\\n- final TestPartitionClient client, final MessageSubscriptionIntent intent)\\n+ private SubscribedRecord findMessageSubscription(final MessageSubscriptionIntent intent)\\n throws AssertionError {\\n- return client\\n+ return testClient\\n .receiveEvents()\\n .filter(intent(intent))\\n .findFirst()\\n\", \"diff --git a/LICENSE b/LICENSE\\nnew file mode 100644\\nindex 0000000..005581d\\n--- /dev/null\\n+++ b/LICENSE\\n@@ -0,0 +1,21 @@\\n+MIT License\\n+\\n+Copyright (c) Hassan El Mghari\\n+\\n+Permission is hereby granted, free of charge, to any person obtaining a copy\\n+of this software and associated documentation files (the \\\"Software\\\"), to deal\\n+in the Software without restriction, including without limitation the rights\\n+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\\n+copies of the Software, and to permit persons to whom the Software is\\n+furnished to do so, subject to the following conditions:\\n+\\n+The above copyright notice and this permission notice shall be included in all\\n+copies or substantial portions of the Software.\\n+\\n+THE SOFTWARE IS PROVIDED \\\"AS IS\\\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\\n+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\\n+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\\n+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\\n+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\\n+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\\n+SOFTWARE.\\n\"]"},"concern_count":{"kind":"number","value":5,"string":"5"},"shas":{"kind":"string","value":"[\"e11d55a4922978b89a2c50bf577124b09449e89c\", \"7f9721dc9bbf66a3712d59352f64ca089da139f0\", \"a280a52c8309465276c3509848ddcddbe19732b6\", \"98bed2a8137930149559bc1cae9bd34a1a75e556\", \"096145f0d32a6b351b1db413b04a685952f04fb3\"]"},"types":{"kind":"string","value":"[\"ci\", \"build\", \"fix\", \"test\", \"docs\"]"}}},{"rowIdx":1375,"cells":{"commit_message":{"kind":"string","value":"`worktree::encode_to_worktree()` to turn UTf-8 into the worktree encoding.,add Expr.equals benchmark,release for ppc64\n\ncloses #3703\n\nSigned-off-by: Carlos A Becker ,avoid cancelling jobs,licensing"},"diff":{"kind":"string","value":"[\"diff --git a/gix-filter/src/worktree.rs b/gix-filter/src/worktree.rs\\ndeleted file mode 100644\\nindex cda7640..0000000\\n--- a/gix-filter/src/worktree.rs\\n+++ /dev/null\\n@@ -1,132 +0,0 @@\\n-//! Worktree encodings are powered by the `encoding_rs` crate, which has a narrower focus than the `iconv` library. Thus this implementation\\n-//! is inherently more limited but will handle the common cases.\\n-//! \\n-//! Note that for encoding to legacy formats, [additional normalization steps](https://docs.rs/encoding_rs/0.8.32/encoding_rs/#preparing-text-for-the-encoders)\\n-//! can be taken, which we do not yet take unless there is specific examples or problems to solve.\\n-\\n-use crate::clear_and_set_capacity;\\n-use crate::worktree::encode_to_git::RoundTrip;\\n-use encoding_rs::DecoderResult;\\n-\\n-///\\n-pub mod encoding {\\n- use bstr::BStr;\\n- use encoding_rs::Encoding;\\n-\\n- ///\\n- pub mod for_label {\\n- use bstr::BString;\\n-\\n- /// The error returned by [for_label()][super::for_label()].\\n- #[derive(Debug, thiserror::Error)]\\n- #[allow(missing_docs)]\\n- pub enum Error {\\n- #[error(\\\"An encoding named '{name}' is not known\\\")]\\n- Unknown { name: BString },\\n- }\\n- }\\n- /// Try to produce a new `Encoding` for `label` or report an error if it is not known.\\n- ///\\n- /// ### Deviation\\n- ///\\n- /// * There is no special handling of UTF-16LE/BE with checks if data contains a BOM or not, like `git` as we don't expect to have\\n- /// data available here.\\n- /// * Special `-BOM` suffixed versions of `UTF-16` encodings are not supported.\\n- pub fn for_label<'a>(label: impl Into<&'a BStr>) -> Result<&'static Encoding, for_label::Error> {\\n- let mut label = label.into();\\n- if label == \\\"latin-1\\\" {\\n- label = \\\"ISO-8859-1\\\".into();\\n- }\\n- let enc =\\n- Encoding::for_label(label.as_ref()).ok_or_else(|| for_label::Error::Unknown { name: label.into() })?;\\n- Ok(enc)\\n- }\\n-}\\n-\\n-///\\n-pub mod encode_to_git {\\n- /// Whether or not to perform round-trip checks.\\n- #[derive(Debug, Copy, Clone)]\\n- pub enum RoundTrip {\\n- /// Assure that we can losslessly convert the UTF-8 result back to the original encoding.\\n- Validate,\\n- /// Do not check if the encoding is round-trippable.\\n- Ignore,\\n- }\\n-\\n- /// The error returned by [`encode_to_git()][super::encode_to_git()].\\n- #[derive(Debug, thiserror::Error)]\\n- #[allow(missing_docs)]\\n- pub enum Error {\\n- #[error(\\\"Cannot convert input of {input_len} bytes to UTF-8 without overflowing\\\")]\\n- Overflow { 
1. gix-filter: the worktree code moves from a single file into a module and gains encoding toward the worktree.
   - gix-filter/src/worktree.rs (132 lines) is deleted; its contents are split across a new gix-filter/src/worktree/ directory.
   - gix-filter/src/worktree/encoding.rs: for_label() resolves an encoding label to a &'static encoding_rs::Encoding, mapping "latin-1" to "ISO-8859-1" and returning for_label::Error::Unknown { name } for unrecognised labels. UTF-16LE/BE BOM handling and the "-BOM"-suffixed UTF-16 variants are documented deviations from git.
   - gix-filter/src/worktree/encode_to_git.rs: encode_to_git() (decode src from src_encoding into UTF-8, with RoundTrip::Validate or RoundTrip::Ignore) moves into a function submodule; its errors are Overflow { input_len }, Malformed { encoding } and RoundTrip { src_encoding, dest_encoding }.
   - gix-filter/src/worktree/encode_to_worktree.rs (new): encode_to_worktree(src_utf8, worktree_encoding, buf) encodes UTF-8 input into the worktree encoding via encoding_rs encode_from_utf8_without_replacement, sizing buf with max_buffer_length_from_utf8_if_no_unmappables; its errors are Overflow { input_len }, InputAsUtf8(std::str::Utf8Error) and Unmappable { character, worktree_encoding }.
   - gix-filter/src/worktree/mod.rs re-exports encode_to_git and encode_to_worktree from their function submodules.
   - gix-filter/tests/worktree/mod.rs: the UTF-8 round-trip test now passes encoding_rs::UTF_8 directly, and a new encode_to_worktree::shift_jis test encodes "ハローワールド" to SHIFT_JIS and re-encodes it to git with RoundTrip::Validate.

2. ibis/tests/benchmarks/test_benchmarks.py: the TPC-H 02 tables (part, supplier, partsupp, nation, region) and the query itself become pytest fixtures; test_repr_tpc_h02(benchmark, tpc_h02) now uses the tpc_h02 fixture, and a new test_large_expr_equals benchmarks ir.Expr.equals(tpc_h02, copy.deepcopy(tpc_h02)) (import copy is added).

3. .goreleaser.yaml:

diff --git a/.goreleaser.yaml b/.goreleaser.yaml
index 46901cb..7d4d355 100644
--- a/.goreleaser.yaml
+++ b/.goreleaser.yaml
@@ -25,6 +25,7 @@ builds:
       - amd64
       - arm
       - arm64
+      - ppc64
     goarm:
       - "7"
     mod_timestamp: '{{ .CommitTimestamp }}'

4. .github/workflows/ibis-backends-cloud.yml:

diff --git a/.github/workflows/ibis-backends-cloud.yml b/.github/workflows/ibis-backends-cloud.yml
index 321708e..b990984 100644
--- a/.github/workflows/ibis-backends-cloud.yml
+++ b/.github/workflows/ibis-backends-cloud.yml
@@ -29,7 +29,9 @@ jobs:
     name: ${{ matrix.backend.title }} python-${{ matrix.python-version }}
     # only a single bigquery or snowflake run at a time, otherwise test data is
     # clobbered by concurrent runs
-    concurrency: ${{ matrix.backend.name }}
+    concurrency:
+      group: ${{ matrix.backend.name }}
+      cancel-in-progress: false
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false

5. atomix/cluster/src/test/java/io/atomix/cluster/messaging/impl/NettyMessagingServiceTlsTest.java: adds the Apache License 2.0 header (Copyright © 2020 camunda services GmbH, info@camunda.com) above the package declaration.

concern_count: 5
shas: [d1fed3e9907d0a9e3fe45dbfe2ff27bd10b3e1f4, b700285c1f27588922d9c56527cee721bb884682, e27e3a6478d59eb0f93af0a51a9c474bad6f8350, 19514bc68624a964c63fc217f163f7b11f3dfe82, cbe62140ce219da84772e21e7cfb4b5c2a25c1b8]
types: [feat, test, build, ci, docs]

rowIdx 1376

commit_message:
- extract _value expr from predicate
- bundle and tree shake assets with webpack
- fix `get-deploy-tags.sh`
- Added tooltip for Data sources table buttons only on small screen
- fix readme

diff:
1. influxdb_iox predicate: `_value` expressions are extracted from the predicate instead of being expanded per field.
   - predicate/src/predicate.rs: Predicate (and EMPTY_PREDICATE) gains a value_expr: Vec<BinaryExpr> field, and a new BinaryExpr { left: Column, op: Operator, right: Expr } struct represents the BinaryExpr variant of a DataFusion expression.
   - predicate/src/delete_predicate.rs: the DeletePredicate-to-Predicate conversion fills value_expr: vec![].
   - query/src/frontend/influxrpc.rs: FieldValueRewriter no longer rewrites `_value = <lit>` into a disjunction over every field column (e.g. `f1 = 1.77 OR f2 = 1.77 OR f3 = 1.77`); it now pushes the expression onto value_exprs and replaces it with a literal true, so the stored Predicate::value_expr can later augment field projections as CASE-statement conditions. PredicateNormalizer::normalized() and TableNormalizedPredicate::new() consequently drop their schema parameter, and the rewriter tests assert the collected value_exprs instead of the expanded disjunctions.

2. package.json / webpack.config.js: assets are bundled and tree-shaken with webpack.
   - package.json adds devDependencies babel-loader ^6.2.5, json-loader ^0.5.4, source-map-support ^0.4.3 and webpack ^1.13.2.
   - webpack.config.js (new):

const webpack = require('webpack');
const path = require('path');
const fs = require('fs');

const nodeModules = {
  zmq: 'commonjs zmq',
  jmp: 'commonjs jmp',
  github: 'commonjs github',
};

module.exports = {
  entry: './src/notebook/index.js',
  target: 'electron-renderer',
  output: {
    path: path.join(__dirname, 'app', 'build'),
    filename: 'webpacked-notebook.js'
  },
  module: {
    loaders: [
      { test: /\.js$/, exclude: /node_modules/, loaders: ['babel'] },
      { test: /\.json$/, loader: 'json-loader' },
    ]
  },
  resolve: {
    extensions: ['', '.js', '.jsx'],
    root: path.join(__dirname, 'app'),
    // Webpack 1
    modulesDirectories: [
      path.resolve(__dirname, 'app', 'node_modules'),
      path.resolve(__dirname, 'node_modules'),
    ],
    // Webpack 2
    modules: [
      path.resolve(__dirname, 'app', 'node_modules'),
    ],
  },
  externals: nodeModules,
  plugins: [
    new webpack.IgnorePlugin(/\.(css|less)$/),
    new webpack.BannerPlugin('require("source-map-support").install();',
      { raw: true, entryOnly: false })
  ],
  devtool: 'sourcemap'
};

3. .circleci/get-deploy-tags.sh:

diff --git a/.circleci/get-deploy-tags.sh b/.circleci/get-deploy-tags.sh
index f80c8cb..7ddfa62 100755
--- a/.circleci/get-deploy-tags.sh
+++ b/.circleci/get-deploy-tags.sh
@@ -20,7 +20,7 @@
 set -euo pipefail
 
 DOCKER_IMAGE_TAG=${1}
-DOCKER_IMAGE="quay.io/influxdb/fusion"
+DOCKER_IMAGE="quay.io/influxdb/iox"
 APP_NAME="IOx"
 
 DOCKER_IMAGE_DIGEST="$(docker image inspect "${DOCKER_IMAGE}:${DOCKER_IMAGE_TAG}" --format '{{ if eq (len .RepoDigests) 1 }}{{index .RepoDigests 0}}{{ end }}')"

4. packages/nc-gui/components/dashboard/settings/DataSources.vue (index 78caa98..0ed5df9) and packages/nc-gui/components/nc/Tooltip.vue (index 0810b8b..97b159e): tooltips for the data-sources table buttons, shown only on small screens.
   - DataSources.vue: in both the data-sources table rows and the expanded base view, the bare action buttons (meta sync "tooltip.metaSync", relations "title.relations", UI ACL "labels.uiAcl", audit "title.audit") are each wrapped in an NcTooltip whose title is the former button label; per the commit message the tooltip is only shown on small screens.
   - Tooltip.vue: the Props interface gains an optional overlayClassName?: string, exposed as a computed overlayClassName and forwarded to the underlying a-tooltip, letting callers such as DataSources.vue scope where the overlay appears.
   - The "Delete Selected Rows" buttons receive the same NcTooltip wrapping.